xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/gimplify.c (revision 154bfe8e089c1a0a4e9ed8414f08d3da90949162)
1 /* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
2    tree representation into the GIMPLE form.
3    Copyright (C) 2002-2018 Free Software Foundation, Inc.
4    Major work done by Sebastian Pop <s.pop@laposte.net>,
5    Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
6 
7 This file is part of GCC.
8 
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13 
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
17 for more details.
18 
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3.  If not see
21 <http://www.gnu.org/licenses/>.  */
22 
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "gimple.h"
33 #include "gimple-predict.h"
34 #include "tree-pass.h"		/* FIXME: only for PROP_gimple_any */
35 #include "ssa.h"
36 #include "cgraph.h"
37 #include "tree-pretty-print.h"
38 #include "diagnostic-core.h"
39 #include "alias.h"
40 #include "fold-const.h"
41 #include "calls.h"
42 #include "varasm.h"
43 #include "stmt.h"
44 #include "expr.h"
45 #include "gimple-fold.h"
46 #include "tree-eh.h"
47 #include "gimplify.h"
48 #include "gimple-iterator.h"
49 #include "stor-layout.h"
50 #include "print-tree.h"
51 #include "tree-iterator.h"
52 #include "tree-inline.h"
53 #include "langhooks.h"
54 #include "tree-cfg.h"
55 #include "tree-ssa.h"
56 #include "omp-general.h"
57 #include "omp-low.h"
58 #include "gimple-low.h"
59 #include "gomp-constants.h"
60 #include "splay-tree.h"
61 #include "gimple-walk.h"
62 #include "langhooks-def.h"	/* FIXME: for lhd_set_decl_assembler_name */
63 #include "builtins.h"
64 #include "stringpool.h"
65 #include "attribs.h"
66 #include "asan.h"
67 #include "dbgcnt.h"
68 
/* Hash set of poisoned variables in a bind expr.  */
/* NOTE(review): NULL when no poisoned-variable tracking is active --
   presumably populated only under -fsanitize=address use-after-scope;
   confirm against its users later in this file.  */
static hash_set<tree> *asan_poisoned_variables = NULL;
71 
/* Per-variable data-sharing attributes recorded while gimplifying
   OpenMP/OpenACC constructs.  Each value is a distinct bit so the
   attributes can be OR'ed together into one flag word.  */

enum gimplify_omp_var_data
{
  GOVD_SEEN = 1 << 0,
  GOVD_EXPLICIT = 1 << 1,
  GOVD_SHARED = 1 << 2,
  GOVD_PRIVATE = 1 << 3,
  GOVD_FIRSTPRIVATE = 1 << 4,
  GOVD_LASTPRIVATE = 1 << 5,
  GOVD_REDUCTION = 1 << 6,
  GOVD_LOCAL = 1 << 7,
  GOVD_MAP = 1 << 8,
  GOVD_DEBUG_PRIVATE = 1 << 9,
  GOVD_PRIVATE_OUTER_REF = 1 << 10,
  GOVD_LINEAR = 1 << 11,
  GOVD_ALIGNED = 1 << 12,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 1 << 13,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 1 << 14,

  GOVD_MAP_0LEN_ARRAY = 1 << 15,

  /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping.  */
  GOVD_MAP_ALWAYS_TO = 1 << 16,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 1 << 17,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 1 << 18,

  /* Flag for GOVD_MAP: must be present already.  */
  GOVD_MAP_FORCE_PRESENT = 1 << 19,

  /* Mask covering the data-sharing classification bits proper.  */
  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};
112 
113 
/* Kind of OpenMP/OpenACC region being gimplified.  Bit 0 of the
   parallel/task/teams/target codes marks the "combined" (resp. untied)
   variant of the base construct.  */

enum omp_region_type
{
  ORT_WORKSHARE = 0,
  ORT_SIMD	= 1 << 0,

  ORT_PARALLEL	= 1 << 1,
  ORT_COMBINED_PARALLEL = ORT_PARALLEL | 1,

  ORT_TASK	= 1 << 2,
  ORT_UNTIED_TASK = ORT_TASK | 1,

  ORT_TEAMS	= 1 << 3,
  ORT_COMBINED_TEAMS = ORT_TEAMS | 1,

  /* Data region.  */
  ORT_TARGET_DATA = 1 << 4,

  /* Data region with offloading.  */
  ORT_TARGET	= 1 << 5,
  ORT_COMBINED_TARGET = ORT_TARGET | 1,

  /* OpenACC variants.  */
  ORT_ACC	= 1 << 6,  /* A generic OpenACC region.  */
  ORT_ACC_DATA	= ORT_ACC | ORT_TARGET_DATA, /* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,  /* Parallel construct */
  ORT_ACC_KERNELS  = ORT_ACC | ORT_TARGET | (1 << 7),  /* Kernels construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | (1 << 7),  /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE	= 1 << 8
};
146 
/* Gimplify hashtable helper.  Traits for the formal-temporary table
   (gimplify_ctx::temp_htab); entries are elt_t pairs keyed by their
   VAL tree -- see lookup_tmp_var.  */

struct gimplify_hasher : free_ptr_hash <elt_t>
{
  static inline hashval_t hash (const elt_t *);
  static inline bool equal (const elt_t *, const elt_t *);
};
154 
/* State for one nesting level of gimplification; created by
   push_gimplify_context, destroyed by pop_gimplify_context, and
   recycled through ctx_pool.  */

struct gimplify_ctx
{
  struct gimplify_ctx *prev_context;  /* Enclosing (outer) context.  */

  /* Stack of GIMPLE_BINDs currently being built.  */
  vec<gbind *> bind_expr_stack;
  /* Chain of temporaries created in this context (see gimple_add_tmp_var).  */
  tree temps;
  /* Cleanups queued while inside a COND_EXPR; flushed when the last
     condition is popped (see gimple_pop_condition).  */
  gimple_seq conditional_cleanups;
  tree exit_label;
  tree return_temp;

  vec<tree> case_labels;
  hash_set<tree> *live_switch_vars;
  /* The formal temporary table.  Should this be persistent?  */
  hash_table<gimplify_hasher> *temp_htab;

  int conditions;  /* Depth of enclosing COND_EXPRs.  */
  unsigned into_ssa : 1;  /* Create SSA names for new temporaries.  */
  unsigned allow_rhs_cond_expr : 1;
  unsigned in_cleanup_point_expr : 1;
  unsigned keep_stack : 1;
  unsigned save_stack : 1;
  unsigned in_switch_expr : 1;
};
178 
/* State for one OpenMP/OpenACC region being gimplified; created by
   new_omp_context, destroyed by delete_omp_context.  */

struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;  /* Enclosing region, or NULL.  */
  /* Per-variable data-sharing flags, keyed by DECL_UID
     (splay_tree_compare_decl_uid).  */
  splay_tree variables;
  hash_set<tree> *privatized_types;
  /* Iteration variables in an OMP_FOR.  */
  vec<tree> loop_iter_var;
  location_t location;  /* input_location when the region was entered.  */
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
  bool combined_loop;
  bool distribute;
  bool target_map_scalars_firstprivate;
  bool target_map_pointers_as_0len_arrays;
  bool target_firstprivatize_array_bases;
  bool add_safelen1;
};
196 
/* Innermost gimplification context, or NULL when none is active.  */
static struct gimplify_ctx *gimplify_ctxp;
/* Innermost OMP region context, or NULL outside of OMP constructs.  */
static struct gimplify_omp_ctx *gimplify_omp_ctxp;

/* Forward declaration.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
/* NOTE(review): map used by the OpenACC 'declare' handling elsewhere in
   this file; exact key/value semantics defined at its uses.  */
static hash_map<tree, tree> *oacc_declare_returns;
static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
					   bool (*) (tree), fallback_t, bool);
205 
/* Shorter alias name for gimple_seq_add_stmt_without_update, for use in
   gimplify.c only.  Appends statement GS to sequence *SEQ_P without
   updating operand caches.  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}

/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.   This function is
   similar to gimple_seq_add_seq, but does not scan the operands.
   During gimplification, we need to manipulate statement sequences
   before the def/use vectors have been constructed.  */

static void
gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  /* Appending an empty sequence is a no-op.  */
  if (src == NULL)
    return;

  si = gsi_last (*dst_p);
  gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
}
232 
233 
/* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
   and popping gimplify contexts.  Free-list singly linked through
   prev_context; see ctx_alloc / ctx_free.  */

static struct gimplify_ctx *ctx_pool = NULL;
238 
239 /* Return a gimplify context struct from the pool.  */
240 
241 static inline struct gimplify_ctx *
242 ctx_alloc (void)
243 {
244   struct gimplify_ctx * c = ctx_pool;
245 
246   if (c)
247     ctx_pool = c->prev_context;
248   else
249     c = XNEW (struct gimplify_ctx);
250 
251   memset (c, '\0', sizeof (*c));
252   return c;
253 }
254 
/* Put gimplify context C back into the pool.  C is not cleared here;
   ctx_alloc zeroes each struct it hands out.  */

static inline void
ctx_free (struct gimplify_ctx *c)
{
  c->prev_context = ctx_pool;
  ctx_pool = c;
}
263 
264 /* Free allocated ctx stack memory.  */
265 
266 void
267 free_gimplify_stack (void)
268 {
269   struct gimplify_ctx *c;
270 
271   while ((c = ctx_pool))
272     {
273       ctx_pool = c->prev_context;
274       free (c);
275     }
276 }
277 
278 
279 /* Set up a context for the gimplifier.  */
280 
281 void
282 push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
283 {
284   struct gimplify_ctx *c = ctx_alloc ();
285 
286   c->prev_context = gimplify_ctxp;
287   gimplify_ctxp = c;
288   gimplify_ctxp->into_ssa = in_ssa;
289   gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
290 }
291 
/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple *body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  /* Every GIMPLE_BIND pushed in this context must have been popped.  */
  gcc_assert (c
              && (!c->bind_expr_stack.exists ()
		  || c->bind_expr_stack.is_empty ()));
  c->bind_expr_stack.release ();
  gimplify_ctxp = c->prev_context;

  /* Hand this context's temporaries to BODY's bind, or record them as
     function-local decls.  */
  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  /* The formal-temporary table does not outlive its context.  */
  delete c->temp_htab;
  c->temp_htab = NULL;
  ctx_free (c);
}
318 
/* Push a GIMPLE_BIND tuple onto the stack of bindings.  */

static void
gimple_push_bind_expr (gbind *bind_stmt)
{
  /* Pre-reserve a few slots to limit reallocation of the stack.  */
  gimplify_ctxp->bind_expr_stack.reserve (8);
  gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
}

/* Pop the first element off the stack of bindings.  */

static void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}

/* Return the first element of the stack of bindings.  */

gbind *
gimple_current_bind_expr (void)
{
  /* I.e. the innermost bind currently being gimplified.  */
  return gimplify_ctxp->bind_expr_stack.last ();
}

/* Return the stack of bindings created during gimplification.  */

vec<gbind *>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}
351 
/* Return true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}

/* Note that we've entered a COND_EXPR.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  /* At unconditional scope no conditional cleanups may be pending.  */
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}

/* Note that we've left a COND_EXPR.  If we're back at unconditional scope
   now, add any conditional cleanups we've seen to the prequeue.  */

static void
gimple_pop_condition (gimple_seq *pre_p)
{
  int conds = --(gimplify_ctxp->conditions);

  gcc_assert (conds >= 0);
  if (conds == 0)
    {
      /* Flush the accumulated cleanups now that no COND_EXPR encloses us.  */
      gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
      gimplify_ctxp->conditional_cleanups = NULL;
    }
}
388 
389 /* A stable comparison routine for use with splay trees and DECLs.  */
390 
391 static int
392 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
393 {
394   tree a = (tree) xa;
395   tree b = (tree) xb;
396 
397   return DECL_UID (a) - DECL_UID (b);
398 }
399 
/* Create a new omp construct that deals with variable remapping.
   REGION_TYPE identifies the construct kind; the returned context is
   empty, chained onto the current gimplify_omp_ctxp, and stamped with
   the current input_location for later diagnostics.  The caller owns
   the context and must release it with delete_omp_context.  */

static struct gimplify_omp_ctx *
new_omp_context (enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *c;

  c = XCNEW (struct gimplify_omp_ctx);
  c->outer_context = gimplify_omp_ctxp;
  /* Keyed by DECL_UID for stable, reproducible ordering.  */
  c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
  c->privatized_types = new hash_set<tree>;
  c->location = input_location;
  c->region_type = region_type;
  /* Variables default to shared everywhere except in task-like regions.  */
  if ((region_type & ORT_TASK) == 0)
    c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  else
    c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;

  return c;
}
420 
/* Destroy an omp construct that deals with variable remapping.
   Frees C's splay tree, privatized-type set, loop-iteration vector
   and finally C itself.  */

static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  delete c->privatized_types;
  c->loop_iter_var.release ();
  XDELETE (c);
}

static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
434 
/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}

/* Gimplify statement T into sequence *SEQ_P, and return the first
   tuple in the sequence of generated tuples for this statement.
   Return NULL if gimplifying T produced no tuples.  */

static gimple *
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  /* Remember where the end of *SEQ_P was before gimplifying T...  */
  gimple_stmt_iterator last = gsi_last (*seq_p);

  gimplify_and_add (t, seq_p);

  if (!gsi_end_p (last))
    {
      /* ...so that T's first generated tuple is the one just past the
	 old end.  */
      gsi_next (&last);
      return gsi_stmt (last);
    }
  else
    /* *SEQ_P was empty on entry, so T's output starts the sequence.  */
    return gimple_seq_first_stmt (*seq_p);
}
464 
465 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
466    LHS, or for a call argument.  */
467 
468 static bool
469 is_gimple_mem_rhs (tree t)
470 {
471   /* If we're dealing with a renamable type, either source or dest must be
472      a renamed variable.  */
473   if (is_gimple_reg_type (TREE_TYPE (t)))
474     return is_gimple_val (t);
475   else
476     return is_gimple_val (t) || is_gimple_lvalue (t);
477 }
478 
479 /* Return true if T is a CALL_EXPR or an expression that can be
480    assigned to a temporary.  Note that this predicate should only be
481    used during gimplification.  See the rationale for this in
482    gimplify_modify_expr.  */
483 
484 static bool
485 is_gimple_reg_rhs_or_call (tree t)
486 {
487   return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
488 	  || TREE_CODE (t) == CALL_EXPR);
489 }
490 
491 /* Return true if T is a valid memory RHS or a CALL_EXPR.  Note that
492    this predicate should only be used during gimplification.  See the
493    rationale for this in gimplify_modify_expr.  */
494 
495 static bool
496 is_gimple_mem_rhs_or_call (tree t)
497 {
498   /* If we're dealing with a renamable type, either source or dest must be
499      a renamed variable.  */
500   if (is_gimple_reg_type (TREE_TYPE (t)))
501     return is_gimple_val (t);
502   else
503     return (is_gimple_val (t)
504 	    || is_gimple_lvalue (t)
505 	    || TREE_CLOBBER_P (t)
506 	    || TREE_CODE (t) == CALL_EXPR);
507 }
508 
/* Create a temporary with a name derived from VAL.  Subroutine of
   lookup_tmp_var; nobody else should call this function.  */

static inline tree
create_tmp_from_val (tree val)
{
  /* Drop all qualifiers and address-space information from the value type.  */
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
  tree var = create_tmp_var (type, get_name (val));
  /* Mark complex and vector temporaries as GIMPLE registers.  */
  if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
      || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (var) = 1;
  return var;
}
523 
/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary.  */

static tree
lookup_tmp_var (tree val, bool is_formal)
{
  tree ret;

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    ret = create_tmp_from_val (val);
  else
    {
      elt_t elt, *elt_p;
      elt_t **slot;

      /* Look VAL up in the formal-temporary table, creating the table
	 lazily on first use in this context.  */
      elt.val = val;
      if (!gimplify_ctxp->temp_htab)
        gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
      slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
      if (*slot == NULL)
	{
	  /* First occurrence of VAL: create its temporary and cache it.  */
	  elt_p = XNEW (elt_t);
	  elt_p->val = val;
	  elt_p->temp = ret = create_tmp_from_val (val);
	  *slot = elt_p;
	}
      else
	{
	  /* VAL was seen before; reuse its temporary.  */
	  elt_p = *slot;
          ret = elt_p->temp;
	}
    }

  return ret;
}
564 
/* Helper for get_formal_tmp_var and get_initialized_tmp_var.
   Gimplify VAL into *PRE_P/*POST_P, store it in a temporary and return
   the temporary.  IS_FORMAL enables reuse of an existing expression
   temporary (see lookup_tmp_var); ALLOW_SSA permits creating an SSA
   name instead of a VAR_DECL when gimplifying into SSA form.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
                      bool is_formal, bool allow_ssa)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  if (allow_ssa
      && gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    {
      t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
      if (! gimple_in_ssa_p (cfun))
	{
	  /* Give the SSA name a hint of VAL's name for readable dumps.  */
	  const char *name = get_name (val);
	  if (name)
	    SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
	}
    }
  else
    t = lookup_tmp_var (val, is_formal);

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  /* MOD has been lowered into PRE_P; the tree node itself is dead.  */
  ggc_free (mod);

  return t;
}
603 
/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
      change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  return internal_get_tmp_var (val, pre_p, NULL, true, true);
}

/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  ALLOW_SSA permits an SSA-name temporary
   when gimplifying into SSA form.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
			 bool allow_ssa)
{
  return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa);
}
631 
/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  VARS is a DECL_CHAINed
   list; GS must be a GIMPLE_BIND.  */

void
declare_vars (tree vars, gimple *gs, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gbind *scope = as_a <gbind *> (gs);

      /* After nreverse, LAST points at the tail of TEMPS, so chaining
	 onto LAST below appends the scope's existing vars.  */
      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
	      			    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}
671 
/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
   for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
   no such upper bound can be obtained.  */

static void
force_constant_size (tree var)
{
  /* The only attempt we make is by querying the maximum size of objects
     of the variable's type.  */

  HOST_WIDE_INT max_size;

  gcc_assert (VAR_P (var));

  max_size = max_int_size_in_bytes (TREE_TYPE (var));

  /* A negative result means no upper bound exists -- hard error.  */
  gcc_assert (max_size >= 0);

  DECL_SIZE_UNIT (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
  DECL_SIZE (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
}
695 
/* Push the temporary variable TMP into the binding of function FN,
   setting its DECL_CONTEXT to FN's decl.  */

void
gimple_add_tmp_var_fn (struct function *fn, tree tmp)
{
  /* TMP must not already belong to any chain or binding.  */
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = fn->decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  record_vars_into (tmp, fn->decl);
}
714 
/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var (tree tmp)
{
  /* TMP must not already belong to any chain or binding.  */
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      /* Chain TMP onto the current gimplification context's temporaries.  */
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  /* Skip regions that do not own their temporaries.  */
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    ctx = ctx->outer_context;
	  if (ctx)
	    omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}
760 }
761 
762 
763 
764 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
765    nodes that are referenced more than once in GENERIC functions.  This is
766    necessary because gimplification (translation into GIMPLE) is performed
767    by modifying tree nodes in-place, so gimplication of a shared node in a
768    first context could generate an invalid GIMPLE form in a second context.
769 
770    This is achieved with a simple mark/copy/unmark algorithm that walks the
771    GENERIC representation top-down, marks nodes with TREE_VISITED the first
772    time it encounters them, duplicates them if they already have TREE_VISITED
773    set, and finally removes the TREE_VISITED marks it has set.
774 
775    The algorithm works only at the function level, i.e. it generates a GENERIC
776    representation of a function with no nodes shared within the function when
777    passed a GENERIC function (except for nodes that are allowed to be shared).
778 
779    At the global level, it is also necessary to unshare tree nodes that are
780    referenced in more than one function, for the same aforementioned reason.
781    This requires some cooperation from the front-end.  There are 2 strategies:
782 
783      1. Manual unsharing.  The front-end needs to call unshare_expr on every
784         expression that might end up being shared across functions.
785 
786      2. Deep unsharing.  This is an extension of regular unsharing.  Instead
787         of calling unshare_expr on expressions that might be shared across
788         functions, the front-end pre-marks them with TREE_VISITED.  This will
789         ensure that they are unshared on the first reference within functions
790         when the regular unsharing algorithm runs.  The counterpart is that
791         this algorithm must look deeper than for manual unsharing, which is
792         specified by LANG_HOOKS_DEEP_UNSHARING.
793 
794   If there are only few specific cases of node sharing across functions, it is
795   probably easier for a front-end to unshare the expressions manually.  On the
796   contrary, if the expressions generated at the global level are as widespread
797   as expressions generated within functions, deep unsharing is very likely the
798   way to go.  */
799 
/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      /* hash_set::add returns false on first insertion, so the empty
	 branch (walk and unshare the subtrees) is taken exactly once
	 per node; later encounters skip the subtrees.  */
      if (data && !((hash_set<tree> *)data)->add (t))
	;
      else
	*walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
	   || TREE_CODE_CLASS (code) == tcc_declaration
	   || TREE_CODE_CLASS (code) == tcc_constant)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}
838 
/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      /* Second (or later) reference: deep-copy the whole subtree.  */
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}
877 
/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
   copy_if_shared_r callback unmodified.  */

static inline void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}
886 
/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
   any nested functions.  */

static void
unshare_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  /* If the language requires deep unsharing, we need a pointer set to make
     sure we don't repeatedly unshare subtrees of unshareable nodes.  */
  hash_set<tree> *visited
    = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;

  /* Unshare the saved body plus the size trees of the result decl.  */
  copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
  copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
  copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);

  delete visited;

  /* Recurse into nested functions via the callgraph.  */
  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unshare_body (cgn->decl);
}
909 
910 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
911    Subtrees are walked until the first unvisited node is encountered.  */
912 
913 static tree
914 unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
915 {
916   tree t = *tp;
917 
918   /* If this node has been visited, unmark it and keep looking.  */
919   if (TREE_VISITED (t))
920     TREE_VISITED (t) = 0;
921 
922   /* Otherwise, don't look any deeper.  */
923   else
924     *walk_subtrees = 0;
925 
926   return NULL_TREE;
927 }
928 
/* Unmark the visited trees rooted at *TP.  */

static inline void
unmark_visited (tree *tp)
{
  walk_tree (tp, unmark_visited_r, NULL, NULL);
}
936 
/* Likewise, but mark all trees as not visited.  Clears the TREE_VISITED
   marks left by unshare_body, in FNDECL's body and nested functions.  */

static void
unvisit_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);

  unmark_visited (&DECL_SAVED_TREE (fndecl));
  unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
  unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));

  /* Recurse into nested functions via the callgraph.  */
  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unvisit_body (cgn->decl);
}
952 
/* Unconditionally make an unshared copy of EXPR.  This is used when using
   stored expressions which span multiple functions, such as BINFO_VTABLE,
   as the normal unsharing process can't tell that they're shared.  */

tree
unshare_expr (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}
963 
964 /* Worker for unshare_expr_without_location.  */
965 
966 static tree
967 prune_expr_location (tree *tp, int *walk_subtrees, void *)
968 {
969   if (EXPR_P (*tp))
970     SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
971   else
972     *walk_subtrees = 0;
973   return NULL_TREE;
974 }
975 
/* Similar to unshare_expr but also prune all expression locations
   from EXPR.  */

tree
unshare_expr_without_location (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  /* Only expression nodes carry locations worth pruning.  */
  if (EXPR_P (expr))
    walk_tree (&expr, prune_expr_location, NULL, NULL);
  return expr;
}
987 
/* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
   one, OR_ELSE otherwise.  The location of a STATEMENT_LISTs
   comprising at least one DEBUG_BEGIN_STMT followed by exactly one
   EXPR is the location of the EXPR.  */

static location_t
rexpr_location (tree expr, location_t or_else = UNKNOWN_LOCATION)
{
  if (!expr)
    return or_else;

  if (EXPR_HAS_LOCATION (expr))
    return EXPR_LOCATION (expr);

  if (TREE_CODE (expr) != STATEMENT_LIST)
    return or_else;

  tree_stmt_iterator i = tsi_start (expr);

  /* Skip the leading DEBUG_BEGIN_STMTs; FOUND records that there was
     at least one, as the comment above requires.  */
  bool found = false;
  while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
    {
      found = true;
      tsi_next (&i);
    }

  /* Exactly one statement must follow the DEBUG_BEGIN_STMTs.  */
  if (!found || !tsi_one_before_end_p (i))
    return or_else;

  /* That statement may itself be such a list; recurse.  */
  return rexpr_location (tsi_stmt (i), or_else);
}
1019 
1020 /* Return TRUE iff EXPR (maybe recursively) has a location; see
1021    rexpr_location for the potential recursion.  */
1022 
1023 static inline bool
1024 rexpr_has_location (tree expr)
1025 {
1026   return rexpr_location (expr) != UNKNOWN_LOCATION;
1027 }
1028 
1029 
/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.

   TEMP, if non-NULL, must be an INIT_EXPR or MODIFY_EXPR whose RHS the
   caller wants pushed down onto the wrapper's value; otherwise a fresh
   "retval" temporary is created here.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  Each wrapper visited is voided
	 (type set to void, side effects marked) on the way down.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		/* The value of a statement list is its last statement.  */
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    case TRANSACTION_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TRANSACTION_EXPR_BODY (*p);
	      break;

	    default:
	      /* Assume that any tree upon which voidify_wrapper_expr is
		 directly called is a wrapper, and that its body is op0.  */
	      if (p == &wrapper)
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		  p = &TREE_OPERAND (*p, 0);
		  break;
		}
	      goto out;
	    }
	}

    out:
      /* *P now points at the innermost value-producing expression.  */
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  /* No assignment supplied: capture the value into a fresh
	     temporary via INIT_EXPR.  */
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}
1126 
1127 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1128    a temporary through which they communicate.  */
1129 
1130 static void
1131 build_stack_save_restore (gcall **save, gcall **restore)
1132 {
1133   tree tmp_var;
1134 
1135   *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
1136   tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1137   gimple_call_set_lhs (*save, tmp_var);
1138 
1139   *restore
1140     = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
1141 			 1, tmp_var);
1142 }
1143 
1144 /* Generate IFN_ASAN_MARK call that poisons shadow of a for DECL variable.  */
1145 
1146 static tree
1147 build_asan_poison_call_expr (tree decl)
1148 {
1149   /* Do not poison variables that have size equal to zero.  */
1150   tree unit_size = DECL_SIZE_UNIT (decl);
1151   if (zerop (unit_size))
1152     return NULL_TREE;
1153 
1154   tree base = build_fold_addr_expr (decl);
1155 
1156   return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
1157 				       void_type_node, 3,
1158 				       build_int_cst (integer_type_node,
1159 						      ASAN_MARK_POISON),
1160 				       base, unit_size);
1161 }
1162 
1163 /* Generate IFN_ASAN_MARK call that would poison or unpoison, depending
1164    on POISON flag, shadow memory of a DECL variable.  The call will be
1165    put on location identified by IT iterator, where BEFORE flag drives
1166    position where the stmt will be put.  */
1167 
1168 static void
1169 asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
1170 		      bool before)
1171 {
1172   tree unit_size = DECL_SIZE_UNIT (decl);
1173   tree base = build_fold_addr_expr (decl);
1174 
1175   /* Do not poison variables that have size equal to zero.  */
1176   if (zerop (unit_size))
1177     return;
1178 
1179   /* It's necessary to have all stack variables aligned to ASAN granularity
1180      bytes.  */
1181   if (DECL_ALIGN_UNIT (decl) <= ASAN_SHADOW_GRANULARITY)
1182     SET_DECL_ALIGN (decl, BITS_PER_UNIT * ASAN_SHADOW_GRANULARITY);
1183 
1184   HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;
1185 
1186   gimple *g
1187     = gimple_build_call_internal (IFN_ASAN_MARK, 3,
1188 				  build_int_cst (integer_type_node, flags),
1189 				  base, unit_size);
1190 
1191   if (before)
1192     gsi_insert_before (it, g, GSI_NEW_STMT);
1193   else
1194     gsi_insert_after (it, g, GSI_NEW_STMT);
1195 }
1196 
1197 /* Generate IFN_ASAN_MARK internal call that depending on POISON flag
1198    either poisons or unpoisons a DECL.  Created statement is appended
1199    to SEQ_P gimple sequence.  */
1200 
1201 static void
1202 asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
1203 {
1204   gimple_stmt_iterator it = gsi_last (*seq_p);
1205   bool before = false;
1206 
1207   if (gsi_end_p (it))
1208     before = true;
1209 
1210   asan_poison_variable (decl, poison, &it, before);
1211 }
1212 
1213 /* Sort pair of VAR_DECLs A and B by DECL_UID.  */
1214 
1215 static int
1216 sort_by_decl_uid (const void *a, const void *b)
1217 {
1218   const tree *t1 = (const tree *)a;
1219   const tree *t2 = (const tree *)b;
1220 
1221   int uid1 = DECL_UID (*t1);
1222   int uid2 = DECL_UID (*t2);
1223 
1224   if (uid1 < uid2)
1225     return -1;
1226   else if (uid1 > uid2)
1227     return 1;
1228   else
1229     return 0;
1230 }
1231 
/* Generate IFN_ASAN_MARK internal call for all VARIABLES
   depending on POISON flag.  Created statement is appended
   to SEQ_P gimple sequence.  */

static void
asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
{
  unsigned c = variables->elements ();
  if (c == 0)
    return;

  /* Sort the variables by DECL_UID so the emitted statements do not
     depend on hash-table iteration order.  */
  auto_vec<tree> sorted_variables (c);

  for (hash_set<tree>::iterator it = variables->begin ();
       it != variables->end (); ++it)
    sorted_variables.safe_push (*it);

  sorted_variables.qsort (sort_by_decl_uid);

  unsigned i;
  tree var;
  FOR_EACH_VEC_ELT (sorted_variables, i, var)
    {
      asan_poison_variable (var, poison, seq_p);

      /* Add use_after_scope_memory attribute for the variable in order
	 to prevent it from being rewritten into SSA form.  */
      if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
			     DECL_ATTRIBUTES (var)))
	DECL_ATTRIBUTES (var)
	  = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
		       integer_one_node,
		       DECL_ATTRIBUTES (var));
    }
}
1267 
/* Gimplify a BIND_EXPR.  Just voidify and recurse.  The GIMPLE_BIND is
   appended to PRE_P; *EXPR_P receives the value temporary (if the
   BIND_EXPR had a value) or NULL_TREE.  */

static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  /* Save the stack flags so they can be restored after gimplifying
     the body.  */
  bool old_keep_stack = gimplify_ctxp->keep_stack;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gbind *bind_stmt;
  gimple_seq body, cleanup;
  gcall *stack_save;
  location_t start_locus = 0, end_locus = 0;
  tree ret_clauses = NULL;

  /* If the BIND_EXPR has a value, redirect it into a temporary.  */
  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t))
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;

	  /* Mark variable as local.  */
	  if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t)
	      && (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL))
	    {
	      int flag = GOVD_LOCAL;
	      if (ctx->region_type == ORT_SIMD
		  && TREE_ADDRESSABLE (t)
		  && !TREE_STATIC (t))
		{
		  /* Addressable automatic in a SIMD region: if its size
		     isn't constant, force safelen 1 instead of
		     privatizing it.  */
		  if (TREE_CODE (DECL_SIZE_UNIT (t)) != INTEGER_CST)
		    ctx->add_safelen1 = true;
		  else
		    flag = GOVD_PRIVATE;
		}
	      omp_add_variable (ctx, t, flag | GOVD_SEEN);
	    }

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}

      /* Preliminarily mark non-addressed complex variables as eligible
	 for promotion to gimple registers.  We'll transform their uses
	 as we find them.  */
      if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	  && !TREE_THIS_VOLATILE (t)
	  && (VAR_P (t) && !DECL_HARD_REGISTER (t))
	  && !needs_to_live_in_memory (t))
	DECL_GIMPLE_REG_P (t) = 1;
    }

  bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
				 BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (bind_stmt);

  /* Reset so that save-stack/keep-stack requests made while gimplifying
     this body are detectable below.  */
  gimplify_ctxp->keep_stack = false;
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (bind_stmt, body);

  /* Source location wise, the cleanup code (stack_restore and clobbers)
     belongs to the end of the block, so propagate what we have.  The
     stack_save operation belongs to the beginning of block, which we can
     infer from the bind_expr directly if the block has no explicit
     assignment.  */
  if (BIND_EXPR_BLOCK (bind_expr))
    {
      end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
      start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
    }
  if (start_locus == 0)
    start_locus = EXPR_LOCATION (bind_expr);

  cleanup = NULL;
  stack_save = NULL;

  /* If the code both contains VLAs and calls alloca, then we cannot reclaim
     the stack space allocated to the VLAs.  */
  if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
    {
      gcall *stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimple_set_location (stack_save, start_locus);
      gimple_set_location (stack_restore, end_locus);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t)
	  && !is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl)
	{
	  if (!DECL_HARD_REGISTER (t)
	      && !TREE_THIS_VOLATILE (t)
	      && !DECL_HAS_VALUE_EXPR_P (t)
	      /* Only care for variables that have to be in memory.  Others
		 will be rewritten into SSA names, hence moved to the
		 top-level.  */
	      && !is_gimple_reg (t)
	      && flag_stack_reuse != SR_NONE)
	    {
	      /* A clobber is represented as a volatile empty constructor
		 assigned to the variable.  */
	      tree clobber = build_constructor (TREE_TYPE (t), NULL);
	      gimple *clobber_stmt;
	      TREE_THIS_VOLATILE (clobber) = 1;
	      clobber_stmt = gimple_build_assign (t, clobber);
	      gimple_set_location (clobber_stmt, end_locus);
	      gimplify_seq_add_stmt (&cleanup, clobber_stmt);
	    }

	  if (flag_openacc && oacc_declare_returns != NULL)
	    {
	      /* Chain up any OpenACC "declare" return clauses recorded
		 for this variable; they are emitted as one OACC_DECLARE
		 target statement below.  */
	      tree *c = oacc_declare_returns->get (t);
	      if (c != NULL)
		{
		  if (ret_clauses)
		    OMP_CLAUSE_CHAIN (*c) = ret_clauses;

		  ret_clauses = *c;

		  oacc_declare_returns->remove (t);

		  if (oacc_declare_returns->elements () == 0)
		    {
		      delete oacc_declare_returns;
		      oacc_declare_returns = NULL;
		    }
		}
	    }
	}

      if (asan_poisoned_variables != NULL
	  && asan_poisoned_variables->contains (t))
	{
	  /* Re-poison the variable's shadow memory on scope exit.  */
	  asan_poisoned_variables->remove (t);
	  asan_poison_variable (t, true, &cleanup);
	}

      if (gimplify_ctxp->live_switch_vars != NULL
	  && gimplify_ctxp->live_switch_vars->contains (t))
	gimplify_ctxp->live_switch_vars->remove (t);
    }

  if (ret_clauses)
    {
      gomp_target *stmt;
      gimple_stmt_iterator si = gsi_start (cleanup);

      stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
				      ret_clauses);
      gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
    }

  if (cleanup)
    {
      gtry *gs;
      gimple_seq new_body;

      /* Wrap the body in a try/finally that runs the cleanups, preceded
	 by the stack_save if one was created.  */
      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
	  		     GIMPLE_TRY_FINALLY);

      if (stack_save)
	gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (bind_stmt, new_body);
    }

  /* keep_stack propagates all the way up to the outermost BIND_EXPR.  */
  if (!gimplify_ctxp->keep_stack)
    gimplify_ctxp->keep_stack = old_keep_stack;
  gimplify_ctxp->save_stack = old_save_stack;

  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, bind_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
1472 
/* Maybe add early return predict statement to PRE_P sequence.  */

static void
maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
{
  /* Only a return inside a conditional context is an "early" return
     worth predicting not-taken; nothing is emitted otherwise.  (The
     previous comment here said the opposite of what the code does.)  */
  if (gimple_conditional_context ())
    {
      gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
					      NOT_TAKEN);
      gimplify_seq_add_stmt (pre_p, predict);
    }
}
1486 
1487 /* Gimplify a RETURN_EXPR.  If the expression to be returned is not a
1488    GIMPLE value, it is assigned to a new temporary and the statement is
1489    re-written to return the temporary.
1490 
1491    PRE_P points to the sequence where side effects that must happen before
1492    STMT should be stored.  */
1493 
1494 static enum gimplify_status
1495 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1496 {
1497   greturn *ret;
1498   tree ret_expr = TREE_OPERAND (stmt, 0);
1499   tree result_decl, result;
1500 
1501   if (ret_expr == error_mark_node)
1502     return GS_ERROR;
1503 
1504   if (!ret_expr
1505       || TREE_CODE (ret_expr) == RESULT_DECL)
1506     {
1507       maybe_add_early_return_predict_stmt (pre_p);
1508       greturn *ret = gimple_build_return (ret_expr);
1509       gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1510       gimplify_seq_add_stmt (pre_p, ret);
1511       return GS_ALL_DONE;
1512     }
1513 
1514   if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1515     result_decl = NULL_TREE;
1516   else
1517     {
1518       result_decl = TREE_OPERAND (ret_expr, 0);
1519 
1520       /* See through a return by reference.  */
1521       if (TREE_CODE (result_decl) == INDIRECT_REF)
1522 	result_decl = TREE_OPERAND (result_decl, 0);
1523 
1524       gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1525 		   || TREE_CODE (ret_expr) == INIT_EXPR)
1526 		  && TREE_CODE (result_decl) == RESULT_DECL);
1527     }
1528 
1529   /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1530      Recall that aggregate_value_p is FALSE for any aggregate type that is
1531      returned in registers.  If we're returning values in registers, then
1532      we don't want to extend the lifetime of the RESULT_DECL, particularly
1533      across another call.  In addition, for those aggregates for which
1534      hard_function_value generates a PARALLEL, we'll die during normal
1535      expansion of structure assignments; there's special code in expand_return
1536      to handle this case that does not exist in expand_expr.  */
1537   if (!result_decl)
1538     result = NULL_TREE;
1539   else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1540     {
1541       if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
1542 	{
1543 	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1544 	    gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1545 	  /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1546 	     should be effectively allocated by the caller, i.e. all calls to
1547 	     this function must be subject to the Return Slot Optimization.  */
1548 	  gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1549 	  gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1550 	}
1551       result = result_decl;
1552     }
1553   else if (gimplify_ctxp->return_temp)
1554     result = gimplify_ctxp->return_temp;
1555   else
1556     {
1557       result = create_tmp_reg (TREE_TYPE (result_decl));
1558 
1559       /* ??? With complex control flow (usually involving abnormal edges),
1560 	 we can wind up warning about an uninitialized value for this.  Due
1561 	 to how this variable is constructed and initialized, this is never
1562 	 true.  Give up and never warn.  */
1563       TREE_NO_WARNING (result) = 1;
1564 
1565       gimplify_ctxp->return_temp = result;
1566     }
1567 
1568   /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1569      Then gimplify the whole thing.  */
1570   if (result != result_decl)
1571     TREE_OPERAND (ret_expr, 0) = result;
1572 
1573   gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1574 
1575   maybe_add_early_return_predict_stmt (pre_p);
1576   ret = gimple_build_return (result);
1577   gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1578   gimplify_seq_add_stmt (pre_p, ret);
1579 
1580   return GS_ALL_DONE;
1581 }
1582 
/* Gimplify a variable-length array DECL: gimplify its size expressions
   and rewrite the decl as an indirection through a pointer temporary
   initialized by an alloca call appended to SEQ_P.  */

static void
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
{
  /* This is a variable-sized decl.  Simplify its size and mark it
     for deferred expansion.  */
  tree t, addr, ptr_type;

  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);

  /* Don't mess with a DECL_VALUE_EXPR set by the front-end.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    return;

  /* All occurrences of this decl in final gimplified code will be
     replaced by indirection.  Setting DECL_VALUE_EXPR does two
     things: First, it lets the rest of the gimplifier know what
     replacement to use.  Second, it lets the debug info know
     where to find the value.  */
  ptr_type = build_pointer_type (TREE_TYPE (decl));
  addr = create_tmp_var (ptr_type, get_name (decl));
  DECL_IGNORED_P (addr) = 0;
  t = build_fold_indirect_ref (addr);
  /* The dereference cannot trap: ADDR is always set to the result of
     the alloca call built below.  */
  TREE_THIS_NOTRAP (t) = 1;
  SET_DECL_VALUE_EXPR (decl, t);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;

  /* addr = (ptr_type) alloca (size, align, max_size);  */
  t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl),
			      max_int_size_in_bytes (TREE_TYPE (decl)));
  /* The call has been built for a variable-sized object.  */
  CALL_ALLOCA_FOR_VAR_P (t) = 1;
  t = fold_convert (ptr_type, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);

  gimplify_and_add (t, seq_p);
}
1621 
1622 /* A helper function to be called via walk_tree.  Mark all labels under *TP
1623    as being forced.  To be called for DECL_INITIAL of static variables.  */
1624 
1625 static tree
1626 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1627 {
1628   if (TYPE_P (*tp))
1629     *walk_subtrees = 0;
1630   if (TREE_CODE (*tp) == LABEL_DECL)
1631     {
1632       FORCED_LABEL (*tp) = 1;
1633       cfun->has_forced_label_in_static = 1;
1634     }
1635 
1636   return NULL_TREE;
1637 }
1638 
/* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
   and initialization explicit.  The DECL_EXPR itself is consumed
   (*STMT_P is set to NULL_TREE); any generated statements go to SEQ_P.  */

static enum gimplify_status
gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
{
  tree stmt = *stmt_p;
  tree decl = DECL_EXPR_DECL (stmt);

  *stmt_p = NULL_TREE;

  if (TREE_TYPE (decl) == error_mark_node)
    return GS_ERROR;

  /* Gimplify the type's size expressions if not already done.  */
  if ((TREE_CODE (decl) == TYPE_DECL
       || VAR_P (decl))
      && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
    {
      gimplify_type_sizes (TREE_TYPE (decl), seq_p);
      if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
	gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
    }

  /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
     in case its size expressions contain problematic nodes like CALL_EXPR.  */
  if (TREE_CODE (decl) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (decl)
      && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
    {
      gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
      if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
	gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
    }

  if (VAR_P (decl) && !DECL_EXTERNAL (decl))
    {
      tree init = DECL_INITIAL (decl);
      bool is_vla = false;

      /* Treat as a VLA if the size isn't a compile-time constant, or if
	 generic stack checking is on and the constant size exceeds the
	 checking threshold.  */
      if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  || (!TREE_STATIC (decl)
	      && flag_stack_check == GENERIC_STACK_CHECK
	      && compare_tree_int (DECL_SIZE_UNIT (decl),
				   STACK_CHECK_MAX_VAR_SIZE) > 0))
	{
	  gimplify_vla_decl (decl, seq_p);
	  is_vla = true;
	}

      /* Unpoison eligible addressable automatics for ASan
	 use-after-scope checking; the matching poison on scope exit is
	 emitted by gimplify_bind_expr.  */
      if (asan_poisoned_variables
	  && !is_vla
	  && TREE_ADDRESSABLE (decl)
	  && !TREE_STATIC (decl)
	  && !DECL_HAS_VALUE_EXPR_P (decl)
	  && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT
	  && dbg_cnt (asan_use_after_scope)
	  && !gimplify_omp_ctxp)
	{
	  asan_poisoned_variables->add (decl);
	  asan_poison_variable (decl, false, seq_p);
	  if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
	    gimplify_ctxp->live_switch_vars->add (decl);
	}

      /* Some front ends do not explicitly declare all anonymous
	 artificial variables.  We compensate here by declaring the
	 variables, though it would be better if the front ends would
	 explicitly declare them.  */
      if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
	  && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
	gimple_add_tmp_var (decl);

      if (init && init != error_mark_node)
	{
	  if (!TREE_STATIC (decl))
	    {
	      /* Turn the initializer into an explicit INIT_EXPR,
		 gimplify that, and free the consumed node.  */
	      DECL_INITIAL (decl) = NULL_TREE;
	      init = build2 (INIT_EXPR, void_type_node, decl, init);
	      gimplify_and_add (init, seq_p);
	      ggc_free (init);
	    }
	  else
	    /* We must still examine initializers for static variables
	       as they may contain a label address.  */
	    walk_tree (&init, force_labels_r, NULL, NULL);
	}
    }

  return GS_ALL_DONE;
}
1729 
1730 /* Gimplify a LOOP_EXPR.  Normally this just involves gimplifying the body
1731    and replacing the LOOP_EXPR with goto, but if the loop contains an
1732    EXIT_EXPR, we need to append a label for it to jump to.  */
1733 
1734 static enum gimplify_status
1735 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1736 {
1737   tree saved_label = gimplify_ctxp->exit_label;
1738   tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1739 
1740   gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1741 
1742   gimplify_ctxp->exit_label = NULL_TREE;
1743 
1744   gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1745 
1746   gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1747 
1748   if (gimplify_ctxp->exit_label)
1749     gimplify_seq_add_stmt (pre_p,
1750 			   gimple_build_label (gimplify_ctxp->exit_label));
1751 
1752   gimplify_ctxp->exit_label = saved_label;
1753 
1754   *expr_p = NULL;
1755   return GS_ALL_DONE;
1756 }
1757 
1758 /* Gimplify a statement list onto a sequence.  These may be created either
1759    by an enlightened front-end, or by shortcut_cond_expr.  */
1760 
1761 static enum gimplify_status
1762 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1763 {
1764   tree temp = voidify_wrapper_expr (*expr_p, NULL);
1765 
1766   tree_stmt_iterator i = tsi_start (*expr_p);
1767 
1768   while (!tsi_end_p (i))
1769     {
1770       gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1771       tsi_delink (&i);
1772     }
1773 
1774   if (temp)
1775     {
1776       *expr_p = temp;
1777       return GS_OK;
1778     }
1779 
1780   return GS_ALL_DONE;
1781 }
1782 
/* Callback for walk_gimple_seq.  Looks for the first "real" statement;
   when one is found it is stashed in WI->info and a non-NULL tree is
   returned to terminate the walk.  */

static tree
warn_switch_unreachable_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
			   struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    case GIMPLE_TRY:
      /* A compiler-generated cleanup or a user-written try block.
	 If it's empty, don't dive into it--that would result in
	 worse location info.  */
      if (gimple_try_eval (stmt) == NULL)
	{
	  wi->info = stmt;
	  return integer_zero_node;
	}
      /* Fall through.  */
    case GIMPLE_BIND:
    case GIMPLE_CATCH:
    case GIMPLE_EH_FILTER:
    case GIMPLE_TRANSACTION:
      /* Walk the sub-statements.  */
      *handled_ops_p = false;
      break;

    case GIMPLE_DEBUG:
      /* Ignore these.  We may generate them before declarations that
	 are never executed.  If there's something to warn about,
	 there will be non-debug stmts too, and we'll catch those.  */
      break;

    case GIMPLE_CALL:
      /* ASAN_MARK calls are instrumentation, not user statements; keep
	 looking past them.  */
      if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
	{
	  *handled_ops_p = false;
	  break;
	}
      /* Fall through.  */
    default:
      /* Save the first "real" statement (not a decl/lexical scope/...).  */
      wi->info = stmt;
      return integer_zero_node;
    }
  return NULL_TREE;
}
1832 
1833 /* Possibly warn about unreachable statements between switch's controlling
1834    expression and the first case.  SEQ is the body of a switch expression.  */
1835 
1836 static void
1837 maybe_warn_switch_unreachable (gimple_seq seq)
1838 {
1839   if (!warn_switch_unreachable
1840       /* This warning doesn't play well with Fortran when optimizations
1841 	 are on.  */
1842       || lang_GNU_Fortran ()
1843       || seq == NULL)
1844     return;
1845 
1846   struct walk_stmt_info wi;
1847   memset (&wi, 0, sizeof (wi));
1848   walk_gimple_seq (seq, warn_switch_unreachable_r, NULL, &wi);
1849   gimple *stmt = (gimple *) wi.info;
1850 
1851   if (stmt && gimple_code (stmt) != GIMPLE_LABEL)
1852     {
1853       if (gimple_code (stmt) == GIMPLE_GOTO
1854 	  && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
1855 	  && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
1856 	/* Don't warn for compiler-generated gotos.  These occur
1857 	   in Duff's devices, for example.  */;
1858       else
1859 	warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
1860 		    "statement will never be executed");
1861     }
1862 }
1863 
1864 
/* A label entry that pairs label and a location.  */
struct label_entry
{
  /* The LABEL_DECL.  */
  tree label;
  /* The source location associated with the label.  */
  location_t loc;
};
1871 
1872 /* Find LABEL in vector of label entries VEC.  */
1873 
1874 static struct label_entry *
1875 find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
1876 {
1877   unsigned int i;
1878   struct label_entry *l;
1879 
1880   FOR_EACH_VEC_ELT (*vec, i, l)
1881     if (l->label == label)
1882       return l;
1883   return NULL;
1884 }
1885 
1886 /* Return true if LABEL, a LABEL_DECL, represents a case label
1887    in a vector of labels CASES.  */
1888 
1889 static bool
1890 case_label_p (const vec<tree> *cases, tree label)
1891 {
1892   unsigned int i;
1893   tree l;
1894 
1895   FOR_EACH_VEC_ELT (*cases, i, l)
1896     if (CASE_LABEL (l) == label)
1897       return true;
1898   return false;
1899 }
1900 
/* Find the last nondebug statement in a scope STMT.  Looks through
   GIMPLE_BINDs and GIMPLE_TRYs; returns NULL if STMT is NULL or the
   scope is empty.  */

static gimple *
last_stmt_in_scope (gimple *stmt)
{
  if (!stmt)
    return NULL;

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      {
	/* Recurse on the last statement of the bind's body.  */
	gbind *bind = as_a <gbind *> (stmt);
	stmt = gimple_seq_last_nondebug_stmt (gimple_bind_body (bind));
	return last_stmt_in_scope (stmt);
      }

    case GIMPLE_TRY:
      {
	gtry *try_stmt = as_a <gtry *> (stmt);
	stmt = gimple_seq_last_nondebug_stmt (gimple_try_eval (try_stmt));
	gimple *last_eval = last_stmt_in_scope (stmt);
	/* If the eval part can fall through into the finally part (and
	   isn't an IFN_FALLTHROUGH marker), the last statement executed
	   is in the cleanup sequence.  NOTE(review):
	   gimple_stmt_may_fallthru is called before the LAST_EVAL ==
	   NULL test, so it must accept NULL -- confirm against its
	   definition in tree-cfg.c.  */
	if (gimple_stmt_may_fallthru (last_eval)
	    && (last_eval == NULL
		|| !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
	    && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
	  {
	    stmt = gimple_seq_last_nondebug_stmt (gimple_try_cleanup (try_stmt));
	    return last_stmt_in_scope (stmt);
	  }
	else
	  return last_eval;
      }

    case GIMPLE_DEBUG:
      /* Debug statements were already filtered out by
	 gimple_seq_last_nondebug_stmt.  */
      gcc_unreachable ();

    default:
      return stmt;
    }
}
1942 
/* Collect interesting labels in LABELS and return the statement preceding
   another case label, or a user-defined label.  Advances *GSI_P through
   the sequence; the returned statement is the candidate "may fall
   through" statement for the -Wimplicit-fallthrough diagnostic.  */

static gimple *
collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
			    auto_vec <struct label_entry> *labels)
{
  gimple *prev = NULL;

  do
    {
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND)
	{
	  /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
	     which starts on a GIMPLE_SWITCH and ends with a break label.
	     Handle that as a single statement that can fall through.  */
	  gbind *bind = as_a <gbind *> (gsi_stmt (*gsi_p));
	  gimple *first = gimple_seq_first_stmt (gimple_bind_body (bind));
	  gimple *last = gimple_seq_last_stmt (gimple_bind_body (bind));
	  if (last
	      && gimple_code (first) == GIMPLE_SWITCH
	      && gimple_code (last) == GIMPLE_LABEL)
	    {
	      tree label = gimple_label_label (as_a <glabel *> (last));
	      if (SWITCH_BREAK_LABEL_P (label))
		{
		  prev = bind;
		  gsi_next (gsi_p);
		  continue;
		}
	    }
	}
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
	  || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
	{
	  /* Nested scope.  Only look at the last statement of
	     the innermost scope.  */
	  location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
	  gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
	  if (last)
	    {
	      prev = last;
	      /* It might be a label without a location.  Use the
		 location of the scope then.  */
	      if (!gimple_has_location (prev))
		gimple_set_location (prev, bind_loc);
	    }
	  gsi_next (gsi_p);
	  continue;
	}

      /* Ifs are tricky.  */
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
	{
	  gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
	  tree false_lab = gimple_cond_false_label (cond_stmt);
	  location_t if_loc = gimple_location (cond_stmt);

	  /* If we have e.g.
	       if (i > 1) goto <D.2259>; else goto D;
	     we can't do much with the else-branch.  */
	  if (!DECL_ARTIFICIAL (false_lab))
	    break;

	  /* Go on until the false label, then one step back.  */
	  for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
	    {
	      gimple *stmt = gsi_stmt (*gsi_p);
	      if (gimple_code (stmt) == GIMPLE_LABEL
		  && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
		break;
	    }

	  /* Not found?  Oops.  */
	  if (gsi_end_p (*gsi_p))
	    break;

	  /* The false label is where control falls through to, so record
	     it with the location of the if for any later warning.  */
	  struct label_entry l = { false_lab, if_loc };
	  labels->safe_push (l);

	  /* Go to the last statement of the then branch.  */
	  gsi_prev (gsi_p);

	  /* if (i != 0) goto <D.1759>; else goto <D.1760>;
	     <D.1759>:
	     <stmt>;
	     goto <D.1761>;
	     <D.1760>:
	   */
	  if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
	      && !gimple_has_location (gsi_stmt (*gsi_p)))
	    {
	      /* Look at the statement before, it might be
		 attribute fallthrough, in which case don't warn.  */
	      gsi_prev (gsi_p);
	      bool fallthru_before_dest
		= gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
	      gsi_next (gsi_p);
	      tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
	      if (!fallthru_before_dest)
		{
		  struct label_entry l = { goto_dest, if_loc };
		  labels->safe_push (l);
		}
	    }
	  /* And move back.  */
	  gsi_next (gsi_p);
	}

      /* Remember the last statement.  Skip labels that are of no interest
	 to us.  */
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
	{
	  tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
	  if (find_label_entry (labels, label))
	    prev = gsi_stmt (*gsi_p);
	}
      /* Instrumentation calls added by the sanitizer are not "real"
	 statements for this purpose; neither are debug statements.  */
      else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
	;
      else if (!is_gimple_debug (gsi_stmt (*gsi_p)))
	prev = gsi_stmt (*gsi_p);
      gsi_next (gsi_p);
    }
  while (!gsi_end_p (*gsi_p)
	 /* Stop if we find a case or a user-defined label.  */
	 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
	     || !gimple_has_location (gsi_stmt (*gsi_p))));

  return prev;
}
2073 
/* Return true if the switch fallthough warning should occur.  LABEL is
   the label statement that we're falling through to.  */

static bool
should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
{
  gimple_stmt_iterator gsi = *gsi_p;

  /* Don't warn if the label is marked with a "falls through" comment.  */
  if (FALLTHROUGH_LABEL_P (label))
    return false;

  /* Don't warn for non-case labels followed by a statement:
       case 0:
	 foo ();
       label:
	 bar ();
     as these are likely intentional.  */
  if (!case_label_p (&gimplify_ctxp->case_labels, label))
    {
      tree l;
      /* Skip past the run of consecutive non-case labels; warn only if
	 it ends in another (case) label, i.e. no statement follows.  */
      while (!gsi_end_p (gsi)
	     && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
	     && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
	     && !case_label_p (&gimplify_ctxp->case_labels, l))
	gsi_next_nondebug (&gsi);
      if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
	return false;
    }

  /* Don't warn for terminated branches, i.e. when the subsequent case labels
     immediately breaks.  */
  gsi = *gsi_p;

  /* Skip all immediately following labels.  */
  while (!gsi_end_p (gsi)
	 && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
	     || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT))
    gsi_next_nondebug (&gsi);

  /* { ... something; default:; } */
  if (gsi_end_p (gsi)
      /* { ... something; default: break; } or
	 { ... something; default: goto L; } */
      || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
      /* { ... something; default: return; } */
      || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
    return false;

  return true;
}
2125 
/* Callback for walk_gimple_seq.  Emits the -Wimplicit-fallthrough warning
   when a statement that may fall through is followed by a (case) label.
   Returns integer_zero_node to stop the walk at end of sequence, NULL_TREE
   to continue.  */

static tree
warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
			     struct walk_stmt_info *)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    case GIMPLE_TRY:
    case GIMPLE_BIND:
    case GIMPLE_CATCH:
    case GIMPLE_EH_FILTER:
    case GIMPLE_TRANSACTION:
      /* Walk the sub-statements.  */
      *handled_ops_p = false;
      break;

    /* Find a sequence of form:

       GIMPLE_LABEL
       [...]
       <may fallthru stmt>
       GIMPLE_LABEL

       and possibly warn.  */
    case GIMPLE_LABEL:
      {
	/* Found a label.  Skip all immediately following labels.  */
	while (!gsi_end_p (*gsi_p)
	       && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
	  gsi_next_nondebug (gsi_p);

	/* There might be no more statements.  */
	if (gsi_end_p (*gsi_p))
	  return integer_zero_node;

	/* Vector of labels that fall through.  */
	auto_vec <struct label_entry> labels;
	gimple *prev = collect_fallthrough_labels (gsi_p, &labels);

	/* There might be no more statements.  */
	if (gsi_end_p (*gsi_p))
	  return integer_zero_node;

	gimple *next = gsi_stmt (*gsi_p);
	tree label;
	/* If what follows is a label, then we may have a fallthrough.  */
	if (gimple_code (next) == GIMPLE_LABEL
	    && gimple_has_location (next)
	    && (label = gimple_label_label (as_a <glabel *> (next)))
	    && prev != NULL)
	  {
	    struct label_entry *l;
	    bool warned_p = false;
	    if (!should_warn_for_implicit_fallthrough (gsi_p, label))
	      /* Quiet.  */;
	    else if (gimple_code (prev) == GIMPLE_LABEL
		     && (label = gimple_label_label (as_a <glabel *> (prev)))
		     && (l = find_label_entry (&labels, label)))
	      /* The fallthrough came via one of the recorded labels; point
		 the warning at the location stored with that label.  */
	      warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
				     "this statement may fall through");
	    else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
		     /* Try to be clever and don't warn when the statement
			can't actually fall through.  */
		     && gimple_stmt_may_fallthru (prev)
		     && gimple_has_location (prev))
	      warned_p = warning_at (gimple_location (prev),
				     OPT_Wimplicit_fallthrough_,
				     "this statement may fall through");
	    if (warned_p)
	      inform (gimple_location (next), "here");

	    /* Mark this label as processed so as to prevent multiple
	       warnings in nested switches.  */
	    FALLTHROUGH_LABEL_P (label) = true;

	    /* So that next warn_implicit_fallthrough_r will start looking for
	       a new sequence starting with this label.  */
	    gsi_prev (gsi_p);
	  }
      }
      break;
   default:
      break;
    }
  return NULL_TREE;
}
2216 
2217 /* Warn when a switch case falls through.  */
2218 
2219 static void
2220 maybe_warn_implicit_fallthrough (gimple_seq seq)
2221 {
2222   if (!warn_implicit_fallthrough)
2223     return;
2224 
2225   /* This warning is meant for C/C++/ObjC/ObjC++ only.  */
2226   if (!(lang_GNU_C ()
2227 	|| lang_GNU_CXX ()
2228 	|| lang_GNU_OBJC ()))
2229     return;
2230 
2231   struct walk_stmt_info wi;
2232   memset (&wi, 0, sizeof (wi));
2233   walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
2234 }
2235 
/* Callback for walk_gimple_seq.  Removes IFN_FALLTHROUGH internal calls
   and diagnoses any that are not (modulo artificial gotos/labels and
   sanitizer marks) immediately followed by a case or default label.  */

static tree
expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
		      struct walk_stmt_info *)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    case GIMPLE_TRY:
    case GIMPLE_BIND:
    case GIMPLE_CATCH:
    case GIMPLE_EH_FILTER:
    case GIMPLE_TRANSACTION:
      /* Walk the sub-statements.  */
      *handled_ops_p = false;
      break;
    case GIMPLE_CALL:
      if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
	{
	  /* The marker has served its purpose; drop it from the IL.  */
	  gsi_remove (gsi_p, true);
	  if (gsi_end_p (*gsi_p))
	    return integer_zero_node;

	  bool found = false;
	  location_t loc = gimple_location (stmt);

	  gimple_stmt_iterator gsi2 = *gsi_p;
	  stmt = gsi_stmt (gsi2);
	  if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
	    {
	      /* Go on until the artificial label.  */
	      tree goto_dest = gimple_goto_dest (stmt);
	      for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
		{
		  if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
		      && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
			   == goto_dest)
		    break;
		}

	      /* Not found?  Stop.  */
	      if (gsi_end_p (gsi2))
		break;

	      /* Look one past it.  */
	      gsi_next (&gsi2);
	    }

	  /* We're looking for a case label or default label here.  */
	  while (!gsi_end_p (gsi2))
	    {
	      stmt = gsi_stmt (gsi2);
	      if (gimple_code (stmt) == GIMPLE_LABEL)
		{
		  tree label = gimple_label_label (as_a <glabel *> (stmt));
		  /* Case/default labels are artificial decls that carry a
		     source location.  */
		  if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
		    {
		      found = true;
		      break;
		    }
		}
	      else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
		;
	      else if (!is_gimple_debug (stmt))
		/* Anything else is not expected.  */
		break;
	      gsi_next (&gsi2);
	    }
	  if (!found)
	    warning_at (loc, 0, "attribute %<fallthrough%> not preceding "
			"a case label or default label");
	}
      break;
    default:
      break;
    }
  return NULL_TREE;
}
2317 
2318 /* Expand all FALLTHROUGH () calls in SEQ.  */
2319 
2320 static void
2321 expand_FALLTHROUGH (gimple_seq *seq_p)
2322 {
2323   struct walk_stmt_info wi;
2324   memset (&wi, 0, sizeof (wi));
2325   walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
2326 }
2327 
2328 
/* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
   branch to.  */

static enum gimplify_status
gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree switch_expr = *expr_p;
  gimple_seq switch_body_seq = NULL;
  enum gimplify_status ret;
  tree index_type = TREE_TYPE (switch_expr);
  if (index_type == NULL_TREE)
    index_type = TREE_TYPE (SWITCH_COND (switch_expr));

  /* The controlling expression must be a gimple value.  */
  ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
                       fb_rvalue);
  if (ret == GS_ERROR || ret == GS_UNHANDLED)
    return ret;

  if (SWITCH_BODY (switch_expr))
    {
      vec<tree> labels;
      vec<tree> saved_labels;
      hash_set<tree> *saved_live_switch_vars = NULL;
      tree default_case = NULL_TREE;
      gswitch *switch_stmt;

      /* Save old labels, get new ones from body, then restore the old
         labels.  Save all the things from the switch body to append after.  */
      saved_labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels.create (8);

      /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR.  */
      saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
      tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
      if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
	gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
      else
	gimplify_ctxp->live_switch_vars = NULL;

      /* Record switch nesting so nested switches and expand_FALLTHROUGH
	 below can tell whether this is the outermost one.  */
      bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
      gimplify_ctxp->in_switch_expr = true;

      gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);

      gimplify_ctxp->in_switch_expr = old_in_switch_expr;
      maybe_warn_switch_unreachable (switch_body_seq);
      maybe_warn_implicit_fallthrough (switch_body_seq);
      /* Only do this for the outermost GIMPLE_SWITCH.  */
      if (!gimplify_ctxp->in_switch_expr)
	expand_FALLTHROUGH (&switch_body_seq);

      labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels = saved_labels;

      if (gimplify_ctxp->live_switch_vars)
	{
	  /* All live switch vars must have been cleaned up by now.  */
	  gcc_assert (gimplify_ctxp->live_switch_vars->elements () == 0);
	  delete gimplify_ctxp->live_switch_vars;
	}
      gimplify_ctxp->live_switch_vars = saved_live_switch_vars;

      preprocess_case_label_vec_for_gimple (labels, index_type,
					    &default_case);

      bool add_bind = false;
      if (!default_case)
	{
	  glabel *new_default;

	  /* No user-written default: synthesize one so the GIMPLE_SWITCH
	     always has a default edge.  */
	  default_case
	    = build_case_label (NULL_TREE, NULL_TREE,
				create_artificial_label (UNKNOWN_LOCATION));
	  if (old_in_switch_expr)
	    {
	      SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case)) = 1;
	      add_bind = true;
	    }
	  new_default = gimple_build_label (CASE_LABEL (default_case));
	  gimplify_seq_add_stmt (&switch_body_seq, new_default);
	}
      else if (old_in_switch_expr)
	{
	  gimple *last = gimple_seq_last_stmt (switch_body_seq);
	  if (last && gimple_code (last) == GIMPLE_LABEL)
	    {
	      tree label = gimple_label_label (as_a <glabel *> (last));
	      if (SWITCH_BREAK_LABEL_P (label))
		add_bind = true;
	    }
	}

      switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
					 default_case, labels);
      /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
	 ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL,
	 wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
	 so that we can easily find the start and end of the switch
	 statement.  */
      if (add_bind)
	{
	  gimple_seq bind_body = NULL;
	  gimplify_seq_add_stmt (&bind_body, switch_stmt);
	  gimple_seq_add_seq (&bind_body, switch_body_seq);
	  gbind *bind = gimple_build_bind (NULL_TREE, bind_body, NULL_TREE);
	  gimple_set_location (bind, EXPR_LOCATION (switch_expr));
	  gimplify_seq_add_stmt (pre_p, bind);
	}
      else
	{
	  gimplify_seq_add_stmt (pre_p, switch_stmt);
	  gimplify_seq_add_seq (pre_p, switch_body_seq);
	}
      labels.release ();
    }
  else
    gcc_unreachable ();

  return GS_ALL_DONE;
}
2448 
2449 /* Gimplify the LABEL_EXPR pointed to by EXPR_P.  */
2450 
2451 static enum gimplify_status
2452 gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
2453 {
2454   gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
2455 	      == current_function_decl);
2456 
2457   tree label = LABEL_EXPR_LABEL (*expr_p);
2458   glabel *label_stmt = gimple_build_label (label);
2459   gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2460   gimplify_seq_add_stmt (pre_p, label_stmt);
2461 
2462   if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2463     gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2464 						      NOT_TAKEN));
2465   else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2466     gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2467 						      TAKEN));
2468 
2469   return GS_ALL_DONE;
2470 }
2471 
2472 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P.  */
2473 
2474 static enum gimplify_status
2475 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
2476 {
2477   struct gimplify_ctx *ctxp;
2478   glabel *label_stmt;
2479 
2480   /* Invalid programs can play Duff's Device type games with, for example,
2481      #pragma omp parallel.  At least in the C front end, we don't
2482      detect such invalid branches until after gimplification, in the
2483      diagnose_omp_blocks pass.  */
2484   for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
2485     if (ctxp->case_labels.exists ())
2486       break;
2487 
2488   label_stmt = gimple_build_label (CASE_LABEL (*expr_p));
2489   gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2490   ctxp->case_labels.safe_push (*expr_p);
2491   gimplify_seq_add_stmt (pre_p, label_stmt);
2492 
2493   return GS_ALL_DONE;
2494 }
2495 
2496 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2497    if necessary.  */
2498 
2499 tree
2500 build_and_jump (tree *label_p)
2501 {
2502   if (label_p == NULL)
2503     /* If there's nowhere to jump, just fall through.  */
2504     return NULL_TREE;
2505 
2506   if (*label_p == NULL_TREE)
2507     {
2508       tree label = create_artificial_label (UNKNOWN_LOCATION);
2509       *label_p = label;
2510     }
2511 
2512   return build1 (GOTO_EXPR, void_type_node, *label_p);
2513 }
2514 
2515 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2516    This also involves building a label to jump to and communicating it to
2517    gimplify_loop_expr through gimplify_ctxp->exit_label.  */
2518 
2519 static enum gimplify_status
2520 gimplify_exit_expr (tree *expr_p)
2521 {
2522   tree cond = TREE_OPERAND (*expr_p, 0);
2523   tree expr;
2524 
2525   expr = build_and_jump (&gimplify_ctxp->exit_label);
2526   expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
2527   *expr_p = expr;
2528 
2529   return GS_OK;
2530 }
2531 
/* *EXPR_P is a COMPONENT_REF being used as an rvalue.  If its type is
   different from its canonical type, wrap the whole thing inside a
   NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
   type.

   The canonical type of a COMPONENT_REF is the type of the field being
   referenced--unless the field is a bit-field which can be read directly
   in a smaller mode, in which case the canonical type is the
   sign-appropriate type corresponding to that mode.  */

static void
canonicalize_component_ref (tree *expr_p)
{
  tree expr = *expr_p;
  tree type;

  gcc_assert (TREE_CODE (expr) == COMPONENT_REF);

  /* For integral types, get_unwidened picks the narrower mode a
     bit-field can be read in; otherwise use the field's type.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
    type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
  else
    type = TREE_TYPE (TREE_OPERAND (expr, 1));

  /* One could argue that all the stuff below is not necessary for
     the non-bitfield case and declare it a FE error if type
     adjustment would be needed.  */
  if (TREE_TYPE (expr) != type)
    {
#ifdef ENABLE_TYPES_CHECKING
      tree old_type = TREE_TYPE (expr);
#endif
      int type_quals;

      /* We need to preserve qualifiers and propagate them from
	 operand 0.  */
      type_quals = TYPE_QUALS (type)
	| TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
      if (TYPE_QUALS (type) != type_quals)
	type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);

      /* Set the type of the COMPONENT_REF to the underlying type.  */
      TREE_TYPE (expr) = type;

#ifdef ENABLE_TYPES_CHECKING
      /* It is now a FE error, if the conversion from the canonical
	 type to the original expression type is not useless.  */
      gcc_assert (useless_type_conversion_p (old_type, type));
#endif
    }
}
2582 
/* If a NOP conversion is changing a pointer to array of foo to a pointer
   to foo, embed that change in the ADDR_EXPR by converting
      T array[U];
      (T *)&array
   ==>
      &array[L]
   where L is the lower bound.  For simplicity, only do this for constant
   lower bound.
   The constraint is that the type of &array[L] is trivially convertible
   to T *.  */

static void
canonicalize_addr_expr (tree *expr_p)
{
  tree expr = *expr_p;
  tree addr_expr = TREE_OPERAND (expr, 0);
  tree datype, ddatype, pddatype;

  /* We simplify only conversions from an ADDR_EXPR to a pointer type.  */
  if (!POINTER_TYPE_P (TREE_TYPE (expr))
      || TREE_CODE (addr_expr) != ADDR_EXPR)
    return;

  /* The addr_expr type should be a pointer to an array.  */
  datype = TREE_TYPE (TREE_TYPE (addr_expr));
  if (TREE_CODE (datype) != ARRAY_TYPE)
    return;

  /* The pointer to element type shall be trivially convertible to
     the expression pointer type.  */
  ddatype = TREE_TYPE (datype);
  pddatype = build_pointer_type (ddatype);
  if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
				  pddatype))
    return;

  /* The lower bound and element sizes must be constant.  */
  if (!TYPE_SIZE_UNIT (ddatype)
      || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
      || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
      || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
    return;

  /* All checks succeeded.  Build a new node to merge the cast.  */
  *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
		    TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
		    NULL_TREE, NULL_TREE);
  *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);

  /* We can have stripped a required restrict qualifier above.  */
  if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
    *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
}
2636 
/* *EXPR_P is a NOP_EXPR or CONVERT_EXPR.  Remove it and/or other conversions
   underneath as appropriate.  Always returns GS_OK.  */

static enum gimplify_status
gimplify_conversion (tree *expr_p)
{
  location_t loc = EXPR_LOCATION (*expr_p);
  gcc_assert (CONVERT_EXPR_P (*expr_p));

  /* Then strip away all but the outermost conversion.  */
  STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));

  /* And remove the outermost conversion if it's useless.  */
  if (tree_ssa_useless_type_conversion (*expr_p))
    *expr_p = TREE_OPERAND (*expr_p, 0);

  /* If we still have a conversion at the toplevel,
     then canonicalize some constructs.  */
  if (CONVERT_EXPR_P (*expr_p))
    {
      tree sub = TREE_OPERAND (*expr_p, 0);

      /* If a NOP conversion is changing the type of a COMPONENT_REF
	 expression, then canonicalize its type now in order to expose more
	 redundant conversions.  */
      if (TREE_CODE (sub) == COMPONENT_REF)
	canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));

      /* If a NOP conversion is changing a pointer to array of foo
	 to a pointer to foo, embed that change in the ADDR_EXPR.  */
      else if (TREE_CODE (sub) == ADDR_EXPR)
	canonicalize_addr_expr (expr_p);
    }

  /* If we have a conversion to a non-register type force the
     use of a VIEW_CONVERT_EXPR instead.  */
  if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
    *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
			       TREE_OPERAND (*expr_p, 0));

  /* Canonicalize CONVERT_EXPR to NOP_EXPR.  */
  if (TREE_CODE (*expr_p) == CONVERT_EXPR)
    TREE_SET_CODE (*expr_p, NOP_EXPR);

  return GS_OK;
}
2683 
/* Nonlocal VLAs seen in the current function.  Used by
   gimplify_var_or_parm_decl to create at most one debug copy per VLA.  */
static hash_set<tree> *nonlocal_vlas;

/* The VAR_DECLs created for nonlocal VLAs for debug info purposes.  */
static tree nonlocal_vla_vars;
2689 
/* Gimplify a VAR_DECL or PARM_DECL.  Return GS_OK if we expanded a
   DECL_VALUE_EXPR, and it's worth re-examining things.  */

static enum gimplify_status
gimplify_var_or_parm_decl (tree *expr_p)
{
  tree decl = *expr_p;

  /* ??? If this is a local variable, and it has not been seen in any
     outer BIND_EXPR, then it's probably the result of a duplicate
     declaration, for which we've already issued an error.  It would
     be really nice if the front end wouldn't leak these at all.
     Currently the only known culprit is C++ destructors, as seen
     in g++.old-deja/g++.jason/binding.C.  */
  if (VAR_P (decl)
      && !DECL_SEEN_IN_BIND_EXPR_P (decl)
      && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
      && decl_function_context (decl) == current_function_decl)
    {
      gcc_assert (seen_error ());
      return GS_ERROR;
    }

  /* When within an OMP context, notice uses of variables.  */
  if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
    return GS_ALL_DONE;

  /* If the decl is an alias for another expression, substitute it now.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree value_expr = DECL_VALUE_EXPR (decl);

      /* For referenced nonlocal VLAs add a decl for debugging purposes
	 to the current function.  */
      if (VAR_P (decl)
	  && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  && nonlocal_vlas != NULL
	  && TREE_CODE (value_expr) == INDIRECT_REF
	  && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
	  && decl_function_context (decl) != current_function_decl)
	{
	  /* Skip enclosing workshare/simd/acc regions; only create the
	     debug copy when not inside an outer OMP region.  */
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    ctx = ctx->outer_context;
	  /* hash_set::add returns true if already present, so each VLA
	     gets at most one debug copy.  */
	  if (!ctx && !nonlocal_vlas->add (decl))
	    {
	      tree copy = copy_node (decl);

	      lang_hooks.dup_lang_specific_decl (copy);
	      SET_DECL_RTL (copy, 0);
	      TREE_USED (copy) = 1;
	      /* Chain the copy onto the list consumed later for debug
		 info generation.  */
	      DECL_CHAIN (copy) = nonlocal_vla_vars;
	      nonlocal_vla_vars = copy;
	      SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
	      DECL_HAS_VALUE_EXPR_P (copy) = 1;
	    }
	}

      *expr_p = unshare_expr (value_expr);
      return GS_OK;
    }

  return GS_ALL_DONE;
}
2757 
2758 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T.  */
2759 
2760 static void
2761 recalculate_side_effects (tree t)
2762 {
2763   enum tree_code code = TREE_CODE (t);
2764   int len = TREE_OPERAND_LENGTH (t);
2765   int i;
2766 
2767   switch (TREE_CODE_CLASS (code))
2768     {
2769     case tcc_expression:
2770       switch (code)
2771 	{
2772 	case INIT_EXPR:
2773 	case MODIFY_EXPR:
2774 	case VA_ARG_EXPR:
2775 	case PREDECREMENT_EXPR:
2776 	case PREINCREMENT_EXPR:
2777 	case POSTDECREMENT_EXPR:
2778 	case POSTINCREMENT_EXPR:
2779 	  /* All of these have side-effects, no matter what their
2780 	     operands are.  */
2781 	  return;
2782 
2783 	default:
2784 	  break;
2785 	}
2786       /* Fall through.  */
2787 
2788     case tcc_comparison:  /* a comparison expression */
2789     case tcc_unary:       /* a unary arithmetic expression */
2790     case tcc_binary:      /* a binary arithmetic expression */
2791     case tcc_reference:   /* a reference */
2792     case tcc_vl_exp:        /* a function call */
2793       TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
2794       for (i = 0; i < len; ++i)
2795 	{
2796 	  tree op = TREE_OPERAND (t, i);
2797 	  if (op && TREE_SIDE_EFFECTS (op))
2798 	    TREE_SIDE_EFFECTS (t) = 1;
2799 	}
2800       break;
2801 
2802     case tcc_constant:
2803       /* No side-effects.  */
2804       return;
2805 
2806     default:
2807       gcc_unreachable ();
2808    }
2809 }
2810 
/* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
   node *EXPR_P.

      compound_lval
	      : min_lval '[' val ']'
	      | min_lval '.' ID
	      | compound_lval '[' val ']'
	      | compound_lval '.' ID

   This is not part of the original SIMPLE definition, which separates
   array and member references, but it seems reasonable to handle them
   together.  Also, this way we don't run into problems with union
   aliasing; gcc requires that for accesses through a union to alias, the
   union reference must be explicit, which was not always the case when we
   were splitting up array and member refs.

   PRE_P points to the sequence where side effects that must happen before
     *EXPR_P should be stored.

   POST_P points to the sequence where side effects that must happen after
     *EXPR_P should be stored.  */

static enum gimplify_status
gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			fallback_t fallback)
{
  tree *p;
  enum gimplify_status ret = GS_ALL_DONE, tret;
  int i;
  location_t loc = EXPR_LOCATION (*expr_p);
  tree expr = *expr_p;

  /* Create a stack of the subexpressions so later we can walk them in
     order from inner to outer.  */
  auto_vec<tree, 10> expr_stack;

  /* We can handle anything that get_inner_reference can deal with.  */
  for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
    {
    restart:
      /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs.  */
      if (TREE_CODE (*p) == INDIRECT_REF)
	*p = fold_indirect_ref_loc (loc, *p);

      if (handled_component_p (*p))
	;
      /* Expand DECL_VALUE_EXPR now.  In some cases that may expose
	 additional COMPONENT_REFs.  */
      else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
	       && gimplify_var_or_parm_decl (p) == GS_OK)
	goto restart;
      else
	break;

      expr_stack.safe_push (*p);
    }

  /* The loop above must have pushed at least the outermost ref, since
     the caller guarantees *EXPR_P is a handled component.  */
  gcc_assert (expr_stack.length ());

  /* Now EXPR_STACK is a stack of pointers to all the refs we've
     walked through and P points to the innermost expression.

     Java requires that we elaborated nodes in source order.  That
     means we must gimplify the inner expression followed by each of
     the indices, in order.  But we can't gimplify the inner
     expression until we deal with any variable bounds, sizes, or
     positions in order to deal with PLACEHOLDER_EXPRs.

     So we do this in three steps.  First we deal with the annotations
     for any variables in the components, then we gimplify the base,
     then we gimplify any indices, from left to right.  */
  for (i = expr_stack.length () - 1; i >= 0; i--)
    {
      tree t = expr_stack[i];

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the low bound and element type size and put them into
	     the ARRAY_REF.  If these values are set, they have already been
	     gimplified.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree low = unshare_expr (array_ref_low_bound (t));
	      /* Only stash the low bound in operand 2 when it is not an
		 invariant; invariant bounds are recomputed on demand.  */
	      if (!is_gimple_min_invariant (low))
		{
		  TREE_OPERAND (t, 2) = low;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      /* Operand 2 was already set by an earlier pass; re-gimplify
		 it in place.  */
	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }

	  if (TREE_OPERAND (t, 3) == NULL_TREE)
	    {
	      tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
	      tree elmt_size = unshare_expr (array_ref_element_size (t));
	      tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));

	      /* Divide the element size by the alignment of the element
		 type (above).  */
	      elmt_size
		= size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);

	      if (!is_gimple_min_invariant (elmt_size))
		{
		  TREE_OPERAND (t, 3) = elmt_size;
		  tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      /* Operand 3 (element size / alignment) was already set;
		 re-gimplify it in place.  */
	      tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
      else if (TREE_CODE (t) == COMPONENT_REF)
	{
	  /* Set the field offset into T and gimplify it.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree offset = unshare_expr (component_ref_field_offset (t));
	      tree field = TREE_OPERAND (t, 1);
	      tree factor
		= size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);

	      /* Divide the offset by its alignment.  */
	      offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);

	      if (!is_gimple_min_invariant (offset))
		{
		  TREE_OPERAND (t, 2) = offset;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      /* Offset already cached in operand 2; re-gimplify it.  */
	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
    }

  /* Step 2 is to gimplify the base expression.  Make sure lvalue is set
     so as to match the min_lval predicate.  Failure to do so may result
     in the creation of large aggregate temporaries.  */
  tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
			fallback | fb_lvalue);
  ret = MIN (ret, tret);

  /* And finally, the indices and operands of ARRAY_REF.  During this
     loop we also remove any useless conversions.  */
  for (; expr_stack.length () > 0; )
    {
      tree t = expr_stack.pop ();

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the dimension.  */
	  if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
				    is_gimple_val, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}

      STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));

      /* The innermost expression P may have originally had
	 TREE_SIDE_EFFECTS set which would have caused all the outer
	 expressions in *EXPR_P leading to P to also have had
	 TREE_SIDE_EFFECTS set.  */
      recalculate_side_effects (t);
    }

  /* If the outermost expression is a COMPONENT_REF, canonicalize its type.  */
  if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
    {
      canonicalize_component_ref (expr_p);
    }

  expr_stack.release ();

  /* If nothing changed, we must report it via a non-GS_ALL_DONE status.  */
  gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);

  return ret;
}
3013 
/*  Gimplify the self modifying expression pointed to by EXPR_P
    (++, --, +=, -=).

    PRE_P points to the list where side effects that must happen before
	*EXPR_P should be stored.

    POST_P points to the list where side effects that must happen after
	*EXPR_P should be stored.

    WANT_VALUE is nonzero iff we want to use the value of this expression
	in another expression.

    ARITH_TYPE is the type the computation should be performed in.  */

enum gimplify_status
gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			bool want_value, tree arith_type)
{
  enum tree_code code;
  tree lhs, lvalue, rhs, t1;
  /* POST collects this expression's own post-queue when handling a
     postfix operator; ORIG_POST_P remembers the caller's queue so the
     local queue can be appended to it at the end.  */
  gimple_seq post = NULL, *orig_post_p = post_p;
  bool postfix;
  enum tree_code arith_code;
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  code = TREE_CODE (*expr_p);

  gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
	      || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);

  /* Prefix or postfix?  */
  if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
    /* Faster to treat as prefix if result is not used.  */
    postfix = want_value;
  else
    postfix = false;

  /* For postfix, make sure the inner expression's post side effects
     are executed after side effects from this expression.  */
  if (postfix)
    post_p = &post;

  /* Add or subtract?  */
  if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
    arith_code = PLUS_EXPR;
  else
    arith_code = MINUS_EXPR;

  /* Gimplify the LHS into a GIMPLE lvalue.  */
  lvalue = TREE_OPERAND (*expr_p, 0);
  ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Extract the operands to the arithmetic operation.  */
  lhs = lvalue;
  rhs = TREE_OPERAND (*expr_p, 1);

  /* For postfix operator, we evaluate the LHS to an rvalue and then use
     that as the result value and in the postqueue operation.  */
  if (postfix)
    {
      ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
      if (ret == GS_ERROR)
	return ret;

      /* Capture the pre-modification value in a temporary; it becomes
	 the value of the whole expression.  */
      lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
    }

  /* For POINTERs increment, use POINTER_PLUS_EXPR.  */
  if (POINTER_TYPE_P (TREE_TYPE (lhs)))
    {
      rhs = convert_to_ptrofftype_loc (loc, rhs);
      /* There is no POINTER_MINUS_EXPR; negate the offset instead.  */
      if (arith_code == MINUS_EXPR)
	rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
      t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
    }
  else
    t1 = fold_convert (TREE_TYPE (*expr_p),
		       fold_build2 (arith_code, arith_type,
				    fold_convert (arith_type, lhs),
				    fold_convert (arith_type, rhs)));

  if (postfix)
    {
      /* Emit the store now and flush the deferred post-queue; the saved
	 temporary LHS is the expression's value.  */
      gimplify_assign (lvalue, t1, pre_p);
      gimplify_seq_add_seq (orig_post_p, post);
      *expr_p = lhs;
      return GS_ALL_DONE;
    }
  else
    {
      /* Prefix: leave a MODIFY_EXPR for the caller to gimplify further.  */
      *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
      return GS_OK;
    }
}
3111 
3112 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR.  */
3113 
3114 static void
3115 maybe_with_size_expr (tree *expr_p)
3116 {
3117   tree expr = *expr_p;
3118   tree type = TREE_TYPE (expr);
3119   tree size;
3120 
3121   /* If we've already wrapped this or the type is error_mark_node, we can't do
3122      anything.  */
3123   if (TREE_CODE (expr) == WITH_SIZE_EXPR
3124       || type == error_mark_node)
3125     return;
3126 
3127   /* If the size isn't known or is a constant, we have nothing to do.  */
3128   size = TYPE_SIZE_UNIT (type);
3129   if (!size || poly_int_tree_p (size))
3130     return;
3131 
3132   /* Otherwise, make a WITH_SIZE_EXPR.  */
3133   size = unshare_expr (size);
3134   size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
3135   *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
3136 }
3137 
3138 /* Helper for gimplify_call_expr.  Gimplify a single argument *ARG_P
3139    Store any side-effects in PRE_P.  CALL_LOCATION is the location of
3140    the CALL_EXPR.  If ALLOW_SSA is set the actual parameter may be
3141    gimplified to an SSA name.  */
3142 
3143 enum gimplify_status
3144 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
3145 	      bool allow_ssa)
3146 {
3147   bool (*test) (tree);
3148   fallback_t fb;
3149 
3150   /* In general, we allow lvalues for function arguments to avoid
3151      extra overhead of copying large aggregates out of even larger
3152      aggregates into temporaries only to copy the temporaries to
3153      the argument list.  Make optimizers happy by pulling out to
3154      temporaries those types that fit in registers.  */
3155   if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
3156     test = is_gimple_val, fb = fb_rvalue;
3157   else
3158     {
3159       test = is_gimple_lvalue, fb = fb_either;
3160       /* Also strip a TARGET_EXPR that would force an extra copy.  */
3161       if (TREE_CODE (*arg_p) == TARGET_EXPR)
3162 	{
3163 	  tree init = TARGET_EXPR_INITIAL (*arg_p);
3164 	  if (init
3165 	      && !VOID_TYPE_P (TREE_TYPE (init)))
3166 	    *arg_p = init;
3167 	}
3168     }
3169 
3170   /* If this is a variable sized type, we must remember the size.  */
3171   maybe_with_size_expr (arg_p);
3172 
3173   /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c.  */
3174   /* Make sure arguments have the same location as the function call
3175      itself.  */
3176   protected_set_expr_location (*arg_p, call_location);
3177 
3178   /* There is a sequence point before a function call.  Side effects in
3179      the argument list must occur before the actual call. So, when
3180      gimplifying arguments, force gimplify_expr to use an internal
3181      post queue which is then appended to the end of PRE_P.  */
3182   return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
3183 }
3184 
3185 /* Don't fold inside offloading or taskreg regions: it can break code by
3186    adding decl references that weren't in the source.  We'll do it during
3187    omplower pass instead.  */
3188 
3189 static bool
3190 maybe_fold_stmt (gimple_stmt_iterator *gsi)
3191 {
3192   struct gimplify_omp_ctx *ctx;
3193   for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
3194     if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
3195       return false;
3196   return fold_stmt (gsi);
3197 }
3198 
/* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
   WANT_VALUE is true if the result of the call is desired.  */

static enum gimplify_status
gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
{
  tree fndecl, parms, p, fnptrtype;
  enum gimplify_status ret;
  int i, nargs;
  gcall *call;
  bool builtin_va_start_p = false;
  location_t loc = EXPR_LOCATION (*expr_p);

  gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);

  /* For reliable diagnostics during inlining, it is necessary that
     every call_expr be annotated with file and line.  */
  if (! EXPR_HAS_LOCATION (*expr_p))
    SET_EXPR_LOCATION (*expr_p, input_location);

  /* Gimplify internal functions created in the FEs.  */
  if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
    {
      /* Internal-fn calls whose value is wanted are handled elsewhere;
	 only statement-position calls are lowered here.  */
      if (want_value)
	return GS_ALL_DONE;

      nargs = call_expr_nargs (*expr_p);
      enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
      auto_vec<tree> vargs (nargs);

      for (i = 0; i < nargs; i++)
	{
	  gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
			EXPR_LOCATION (*expr_p));
	  vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
	}

      /* NOTE(review): this inner CALL shadows the function-scope CALL
	 declared above; the outer one is only used in the !want_value
	 path at the end of the function.  */
      gcall *call = gimple_build_call_internal_vec (ifn, vargs);
      gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p));
      gimplify_seq_add_stmt (pre_p, call);
      return GS_ALL_DONE;
    }

  /* This may be a call to a builtin function.

     Builtin function calls may be transformed into different
     (and more efficient) builtin function calls under certain
     circumstances.  Unfortunately, gimplification can muck things
     up enough that the builtin expanders are not aware that certain
     transformations are still valid.

     So we attempt transformation/gimplification of the call before
     we gimplify the CALL_EXPR.  At this time we do not manage to
     transform all calls in the same manner as the expanders do, but
     we do transform most of them.  */
  fndecl = get_callee_fndecl (*expr_p);
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_BUILT_IN_ALLOCA:
	/* If the call has been built for a variable-sized object, then we
	   want to restore the stack level when the enclosing BIND_EXPR is
	   exited to reclaim the allocated space; otherwise, we precisely
	   need to do the opposite and preserve the latest stack level.  */
	if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
	  gimplify_ctxp->save_stack = true;
	else
	  gimplify_ctxp->keep_stack = true;
	break;

      case BUILT_IN_VA_START:
        {
	  builtin_va_start_p = TRUE;
	  if (call_expr_nargs (*expr_p) < 2)
	    {
	      error ("too few arguments to function %<va_start%>");
	      *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
	      return GS_OK;
	    }

	  if (fold_builtin_next_arg (*expr_p, true))
	    {
	      *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
	      return GS_OK;
	    }
	  break;
	}

      default:
        ;
      }
  /* Try folding builtin calls up front; a successful fold restarts
     gimplification on the transformed tree.  */
  if (fndecl && DECL_BUILT_IN (fndecl))
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
      if (new_tree && new_tree != *expr_p)
	{
	  /* There was a transformation of this call which computes the
	     same value, but in a more efficient way.  Return and try
	     again.  */
	  *expr_p = new_tree;
	  return GS_OK;
	}
    }

  /* Remember the original function pointer type.  */
  fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));

  /* There is a sequence point before the call, so any side effects in
     the calling expression must occur before the actual call.  Force
     gimplify_expr to use an internal post queue.  */
  ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
		       is_gimple_call_addr, fb_rvalue);

  nargs = call_expr_nargs (*expr_p);

  /* Get argument types for verification.  */
  fndecl = get_callee_fndecl (*expr_p);
  parms = NULL_TREE;
  if (fndecl)
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  else
    parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));

  if (fndecl && DECL_ARGUMENTS (fndecl))
    p = DECL_ARGUMENTS (fndecl);
  else if (parms)
    p = parms;
  else
    p = NULL_TREE;
  /* Advance P past the named parameters; if it runs out before the
     arguments do, the trailing arguments are unnamed (variadic).  */
  for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
    ;

  /* If the last argument is __builtin_va_arg_pack () and it is not
     passed as a named argument, decrease the number of CALL_EXPR
     arguments and set instead the CALL_EXPR_VA_ARG_PACK flag.  */
  if (!p
      && i < nargs
      && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
    {
      tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
      tree last_arg_fndecl = get_callee_fndecl (last_arg);

      if (last_arg_fndecl
	  && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
	  && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
	{
	  tree call = *expr_p;

	  --nargs;
	  *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
					  CALL_EXPR_FN (call),
					  nargs, CALL_EXPR_ARGP (call));

	  /* Copy all CALL_EXPR flags, location and block, except
	     CALL_EXPR_VA_ARG_PACK flag.  */
	  CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
	  CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
	  CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
	    = CALL_EXPR_RETURN_SLOT_OPT (call);
	  CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
	  SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));

	  /* Set CALL_EXPR_VA_ARG_PACK.  */
	  CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
	}
    }

  /* If the call returns twice then after building the CFG the call
     argument computations will no longer dominate the call because
     we add an abnormal incoming edge to the call.  So do not use SSA
     vars there.  */
  bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;

  /* Gimplify the function arguments.  */
  if (nargs > 0)
    {
      for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
           PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
           PUSH_ARGS_REVERSED ? i-- : i++)
        {
          enum gimplify_status t;

          /* Avoid gimplifying the second argument to va_start, which needs to
             be the plain PARM_DECL.  */
          if ((i != 1) || !builtin_va_start_p)
            {
              t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
				EXPR_LOCATION (*expr_p), ! returns_twice);

              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
    }

  /* Gimplify the static chain.  */
  if (CALL_EXPR_STATIC_CHAIN (*expr_p))
    {
      if (fndecl && !DECL_STATIC_CHAIN (fndecl))
	CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
      else
	{
	  enum gimplify_status t;
	  t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
			    EXPR_LOCATION (*expr_p), ! returns_twice);
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	}
    }

  /* Verify the function result.  */
  if (want_value && fndecl
      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
    {
      error_at (loc, "using result of function returning %<void%>");
      ret = GS_ERROR;
    }

  /* Try this again in case gimplification exposed something.  */
  if (ret != GS_ERROR)
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);

      if (new_tree && new_tree != *expr_p)
	{
	  /* There was a transformation of this call which computes the
	     same value, but in a more efficient way.  Return and try
	     again.  */
	  *expr_p = new_tree;
	  return GS_OK;
	}
    }
  else
    {
      *expr_p = error_mark_node;
      return GS_ERROR;
    }

  /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
     decl.  This allows us to eliminate redundant or useless
     calls to "const" functions.  */
  if (TREE_CODE (*expr_p) == CALL_EXPR)
    {
      int flags = call_expr_flags (*expr_p);
      if (flags & (ECF_CONST | ECF_PURE)
	  /* An infinite loop is considered a side effect.  */
	  && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
	TREE_SIDE_EFFECTS (*expr_p) = 0;
    }

  /* If the value is not needed by the caller, emit a new GIMPLE_CALL
     and clear *EXPR_P.  Otherwise, leave *EXPR_P in its gimplified
     form and delegate the creation of a GIMPLE_CALL to
     gimplify_modify_expr.  This is always possible because when
     WANT_VALUE is true, the caller wants the result of this call into
     a temporary, which means that we will emit an INIT_EXPR in
     internal_get_tmp_var which will then be handled by
     gimplify_modify_expr.  */
  if (!want_value)
    {
      /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
	 have to do is replicate it as a GIMPLE_CALL tuple.  */
      gimple_stmt_iterator gsi;
      call = gimple_build_call_from_tree (*expr_p, fnptrtype);
      notice_special_calls (call);
      gimplify_seq_add_stmt (pre_p, call);
      gsi = gsi_last (*pre_p);
      maybe_fold_stmt (&gsi);
      *expr_p = NULL_TREE;
    }
  else
    /* Remember the original function type.  */
    CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
				     CALL_EXPR_FN (*expr_p));

  return ret;
}
3478 
/* Handle shortcut semantics in the predicate operand of a COND_EXPR by
   rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.

   TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
   condition is true or false, respectively.  If null, we should generate
   our own to skip over the evaluation of this specific expression.

   LOCUS is the source location of the COND_EXPR.

   This function is the tree equivalent of do_jump.

   shortcut_cond_r should only be called by shortcut_cond_expr.  */

static tree
shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
		 location_t locus)
{
  /* LOCAL_LABEL is lazily created when a recursive call needs a label
     that the caller did not supply; it is emitted at the very end.  */
  tree local_label = NULL_TREE;
  tree t, expr = NULL;

  /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
     retain the shortcut semantics.  Just insert the gotos here;
     shortcut_cond_expr will append the real blocks later.  */
  if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a && b) into

	 if (a); else goto no;
	 if (b) goto yes; else goto no;
	 (no:) */

      if (false_label_p == NULL)
	false_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the && on the second 'if'.  */
      new_locus = rexpr_location (pred, locus);
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
			   new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a || b) into

	 if (a) goto yes;
	 if (b) goto yes; else goto no;
	 (yes:) */

      if (true_label_p == NULL)
	true_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the || on the second 'if'.  */
      new_locus = rexpr_location (pred, locus);
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
			   new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == COND_EXPR
	   && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
	   && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
    {
      location_t new_locus;

      /* As long as we're messing with gotos, turn if (a ? b : c) into
	 if (a)
	   if (b) goto yes; else goto no;
	 else
	   if (c) goto yes; else goto no;

	 Don't do this if one of the arms has void type, which can happen
	 in C++ when the arm is throw.  */

      /* Keep the original source location on the first 'if'.  Set the source
	 location of the ? on the second 'if'.  */
      new_locus = rexpr_location (pred, locus);
      expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
		     shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
				      false_label_p, locus),
		     shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
				      false_label_p, new_locus));
    }
  else
    {
      /* Base case: a simple predicate becomes a two-way conditional
	 jump; build_and_jump creates labels on demand via the pointers.  */
      expr = build3 (COND_EXPR, void_type_node, pred,
		     build_and_jump (true_label_p),
		     build_and_jump (false_label_p));
      SET_EXPR_LOCATION (expr, locus);
    }

  if (local_label)
    {
      /* Emit the label this expression's own jumps targeted.  */
      t = build1 (LABEL_EXPR, void_type_node, local_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
3588 
3589 /* If EXPR is a GOTO_EXPR, return it.  If it is a STATEMENT_LIST, skip
3590    any of its leading DEBUG_BEGIN_STMTS and recurse on the subsequent
3591    statement, if it is the last one.  Otherwise, return NULL.  */
3592 
3593 static tree
3594 find_goto (tree expr)
3595 {
3596   if (!expr)
3597     return NULL_TREE;
3598 
3599   if (TREE_CODE (expr) == GOTO_EXPR)
3600     return expr;
3601 
3602   if (TREE_CODE (expr) != STATEMENT_LIST)
3603     return NULL_TREE;
3604 
3605   tree_stmt_iterator i = tsi_start (expr);
3606 
3607   while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
3608     tsi_next (&i);
3609 
3610   if (!tsi_one_before_end_p (i))
3611     return NULL_TREE;
3612 
3613   return find_goto (tsi_stmt (i));
3614 }
3615 
3616 /* Same as find_goto, except that it returns NULL if the destination
3617    is not a LABEL_DECL.  */
3618 
3619 static inline tree
3620 find_goto_label (tree expr)
3621 {
3622   tree dest = find_goto (expr);
3623   if (dest && TREE_CODE (GOTO_DESTINATION (dest)) == LABEL_DECL)
3624     return dest;
3625   return NULL_TREE;
3626 }
3627 
3628 /* Given a conditional expression EXPR with short-circuit boolean
3629    predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
3630    predicate apart into the equivalent sequence of conditionals.  */
3631 
3632 static tree
3633 shortcut_cond_expr (tree expr)
3634 {
3635   tree pred = TREE_OPERAND (expr, 0);
3636   tree then_ = TREE_OPERAND (expr, 1);
3637   tree else_ = TREE_OPERAND (expr, 2);
3638   tree true_label, false_label, end_label, t;
3639   tree *true_label_p;
3640   tree *false_label_p;
3641   bool emit_end, emit_false, jump_over_else;
3642   bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
3643   bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
3644 
3645   /* First do simple transformations.  */
3646   if (!else_se)
3647     {
3648       /* If there is no 'else', turn
3649 	   if (a && b) then c
3650 	 into
3651 	   if (a) if (b) then c.  */
3652       while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3653 	{
3654 	  /* Keep the original source location on the first 'if'.  */
3655 	  location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3656 	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3657 	  /* Set the source location of the && on the second 'if'.  */
3658 	  if (rexpr_has_location (pred))
3659 	    SET_EXPR_LOCATION (expr, rexpr_location (pred));
3660 	  then_ = shortcut_cond_expr (expr);
3661 	  then_se = then_ && TREE_SIDE_EFFECTS (then_);
3662 	  pred = TREE_OPERAND (pred, 0);
3663 	  expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
3664 	  SET_EXPR_LOCATION (expr, locus);
3665 	}
3666     }
3667 
3668   if (!then_se)
3669     {
3670       /* If there is no 'then', turn
3671 	   if (a || b); else d
3672 	 into
3673 	   if (a); else if (b); else d.  */
3674       while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3675 	{
3676 	  /* Keep the original source location on the first 'if'.  */
3677 	  location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
3678 	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
3679 	  /* Set the source location of the || on the second 'if'.  */
3680 	  if (rexpr_has_location (pred))
3681 	    SET_EXPR_LOCATION (expr, rexpr_location (pred));
3682 	  else_ = shortcut_cond_expr (expr);
3683 	  else_se = else_ && TREE_SIDE_EFFECTS (else_);
3684 	  pred = TREE_OPERAND (pred, 0);
3685 	  expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
3686 	  SET_EXPR_LOCATION (expr, locus);
3687 	}
3688     }
3689 
3690   /* If we're done, great.  */
3691   if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
3692       && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
3693     return expr;
3694 
3695   /* Otherwise we need to mess with gotos.  Change
3696        if (a) c; else d;
3697      to
3698        if (a); else goto no;
3699        c; goto end;
3700        no: d; end:
3701      and recursively gimplify the condition.  */
3702 
3703   true_label = false_label = end_label = NULL_TREE;
3704 
3705   /* If our arms just jump somewhere, hijack those labels so we don't
3706      generate jumps to jumps.  */
3707 
3708   if (tree then_goto = find_goto_label (then_))
3709     {
3710       true_label = GOTO_DESTINATION (then_goto);
3711       then_ = NULL;
3712       then_se = false;
3713     }
3714 
3715   if (tree else_goto = find_goto_label (else_))
3716     {
3717       false_label = GOTO_DESTINATION (else_goto);
3718       else_ = NULL;
3719       else_se = false;
3720     }
3721 
3722   /* If we aren't hijacking a label for the 'then' branch, it falls through.  */
3723   if (true_label)
3724     true_label_p = &true_label;
3725   else
3726     true_label_p = NULL;
3727 
3728   /* The 'else' branch also needs a label if it contains interesting code.  */
3729   if (false_label || else_se)
3730     false_label_p = &false_label;
3731   else
3732     false_label_p = NULL;
3733 
3734   /* If there was nothing else in our arms, just forward the label(s).  */
3735   if (!then_se && !else_se)
3736     return shortcut_cond_r (pred, true_label_p, false_label_p,
3737 			    EXPR_LOC_OR_LOC (expr, input_location));
3738 
3739   /* If our last subexpression already has a terminal label, reuse it.  */
3740   if (else_se)
3741     t = expr_last (else_);
3742   else if (then_se)
3743     t = expr_last (then_);
3744   else
3745     t = NULL;
3746   if (t && TREE_CODE (t) == LABEL_EXPR)
3747     end_label = LABEL_EXPR_LABEL (t);
3748 
3749   /* If we don't care about jumping to the 'else' branch, jump to the end
3750      if the condition is false.  */
3751   if (!false_label_p)
3752     false_label_p = &end_label;
3753 
3754   /* We only want to emit these labels if we aren't hijacking them.  */
3755   emit_end = (end_label == NULL_TREE);
3756   emit_false = (false_label == NULL_TREE);
3757 
3758   /* We only emit the jump over the else clause if we have to--if the
3759      then clause may fall through.  Otherwise we can wind up with a
3760      useless jump and a useless label at the end of gimplified code,
3761      which will cause us to think that this conditional as a whole
3762      falls through even if it doesn't.  If we then inline a function
3763      which ends with such a condition, that can cause us to issue an
3764      inappropriate warning about control reaching the end of a
3765      non-void function.  */
3766   jump_over_else = block_may_fallthru (then_);
3767 
3768   pred = shortcut_cond_r (pred, true_label_p, false_label_p,
3769 			  EXPR_LOC_OR_LOC (expr, input_location));
3770 
3771   expr = NULL;
3772   append_to_statement_list (pred, &expr);
3773 
3774   append_to_statement_list (then_, &expr);
3775   if (else_se)
3776     {
3777       if (jump_over_else)
3778 	{
3779 	  tree last = expr_last (expr);
3780 	  t = build_and_jump (&end_label);
3781 	  if (rexpr_has_location (last))
3782 	    SET_EXPR_LOCATION (t, rexpr_location (last));
3783 	  append_to_statement_list (t, &expr);
3784 	}
3785       if (emit_false)
3786 	{
3787 	  t = build1 (LABEL_EXPR, void_type_node, false_label);
3788 	  append_to_statement_list (t, &expr);
3789 	}
3790       append_to_statement_list (else_, &expr);
3791     }
3792   if (emit_end && end_label)
3793     {
3794       t = build1 (LABEL_EXPR, void_type_node, end_label);
3795       append_to_statement_list (t, &expr);
3796     }
3797 
3798   return expr;
3799 }
3800 
/* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE.
   Recursively boolifies the operands of truth expressions and rewrites
   the type of comparisons; anything else is converted with
   fold_convert_loc.  Returns the (possibly modified) expression.  */

tree
gimple_boolify (tree expr)
{
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);

  /* Special-case `__builtin_expect (x, y) != 0' so that a truth-valued
     X is boolified in place rather than hidden behind the call.  */
  if (TREE_CODE (expr) == NE_EXPR
      && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
      && integer_zerop (TREE_OPERAND (expr, 1)))
    {
      tree call = TREE_OPERAND (expr, 0);
      tree fn = get_callee_fndecl (call);

      /* For __builtin_expect ((long) (x), y) recurse into x as well
	 if x is truth_value_p.  */
      if (fn
	  && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
	  && call_expr_nargs (call) == 2)
	{
	  tree arg = CALL_EXPR_ARG (call, 0);
	  if (arg)
	    {
	      /* Strip a conversion to the builtin's argument type so the
		 underlying truth expression is visible.  */
	      if (TREE_CODE (arg) == NOP_EXPR
		  && TREE_TYPE (arg) == TREE_TYPE (call))
		arg = TREE_OPERAND (arg, 0);
	      if (truth_value_p (TREE_CODE (arg)))
		{
		  arg = gimple_boolify (arg);
		  CALL_EXPR_ARG (call, 0)
		    = fold_convert_loc (loc, TREE_TYPE (call), arg);
		}
	    }
	}
    }

  switch (TREE_CODE (expr))
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Also boolify the arguments of truth exprs.  */
      TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
      /* FALLTHRU */

    case TRUTH_NOT_EXPR:
      TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

      /* These expressions always produce boolean results.  */
      if (TREE_CODE (type) != BOOLEAN_TYPE)
	TREE_TYPE (expr) = boolean_type_node;
      return expr;

    case ANNOTATE_EXPR:
      /* Loop annotations wrap a condition; boolify the wrapped operand.  */
      switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
	{
	case annot_expr_ivdep_kind:
	case annot_expr_unroll_kind:
	case annot_expr_no_vector_kind:
	case annot_expr_vector_kind:
	case annot_expr_parallel_kind:
	  TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
	  if (TREE_CODE (type) != BOOLEAN_TYPE)
	    TREE_TYPE (expr) = boolean_type_node;
	  return expr;
	default:
	  gcc_unreachable ();
	}

    default:
      if (COMPARISON_CLASS_P (expr))
	{
	  /* These expressions always produce boolean results.  */
	  if (TREE_CODE (type) != BOOLEAN_TYPE)
	    TREE_TYPE (expr) = boolean_type_node;
	  return expr;
	}
      /* Other expressions that get here must have boolean values, but
	 might need to be converted to the appropriate mode.  */
      if (TREE_CODE (type) == BOOLEAN_TYPE)
	return expr;
      return fold_convert_loc (loc, boolean_type_node, expr);
    }
}
3889 
3890 /* Given a conditional expression *EXPR_P without side effects, gimplify
3891    its operands.  New statements are inserted to PRE_P.  */
3892 
3893 static enum gimplify_status
3894 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
3895 {
3896   tree expr = *expr_p, cond;
3897   enum gimplify_status ret, tret;
3898   enum tree_code code;
3899 
3900   cond = gimple_boolify (COND_EXPR_COND (expr));
3901 
3902   /* We need to handle && and || specially, as their gimplification
3903      creates pure cond_expr, thus leading to an infinite cycle otherwise.  */
3904   code = TREE_CODE (cond);
3905   if (code == TRUTH_ANDIF_EXPR)
3906     TREE_SET_CODE (cond, TRUTH_AND_EXPR);
3907   else if (code == TRUTH_ORIF_EXPR)
3908     TREE_SET_CODE (cond, TRUTH_OR_EXPR);
3909   ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
3910   COND_EXPR_COND (*expr_p) = cond;
3911 
3912   tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
3913 				   is_gimple_val, fb_rvalue);
3914   ret = MIN (ret, tret);
3915   tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
3916 				   is_gimple_val, fb_rvalue);
3917 
3918   return MIN (ret, tret);
3919 }
3920 
3921 /* Return true if evaluating EXPR could trap.
3922    EXPR is GENERIC, while tree_could_trap_p can be called
3923    only on GIMPLE.  */
3924 
3925 static bool
3926 generic_expr_could_trap_p (tree expr)
3927 {
3928   unsigned i, n;
3929 
3930   if (!expr || is_gimple_val (expr))
3931     return false;
3932 
3933   if (!EXPR_P (expr) || tree_could_trap_p (expr))
3934     return true;
3935 
3936   n = TREE_OPERAND_LENGTH (expr);
3937   for (i = 0; i < n; i++)
3938     if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
3939       return true;
3940 
3941   return false;
3942 }
3943 
/*  Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
    into

    if (p)			if (p)
      t1 = a;			  a;
    else		or	else
      t1 = b;			  b;
    t1;

    The second form is used when *EXPR_P is of type void.

    PRE_P points to the list where side effects that must happen before
      *EXPR_P should be stored.  */

static enum gimplify_status
gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
{
  tree expr = *expr_p;
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);
  tree tmp, arm1, arm2;
  enum gimplify_status ret;
  tree label_true, label_false, label_cont;
  bool have_then_clause_p, have_else_clause_p;
  gcond *cond_stmt;
  enum tree_code pred_code;
  gimple_seq seq = NULL;

  /* If this COND_EXPR has a value, copy the values into a temporary within
     the arms.  */
  if (!VOID_TYPE_P (type))
    {
      tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
      tree result;

      /* If either an rvalue is ok or we do not require an lvalue, create the
	 temporary.  But we cannot do that if the type is addressable.  */
      if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
	  && !TREE_ADDRESSABLE (type))
	{
	  if (gimplify_ctxp->allow_rhs_cond_expr
	      /* If either branch has side effects or could trap, it can't be
		 evaluated unconditionally.  */
	      && !TREE_SIDE_EFFECTS (then_)
	      && !generic_expr_could_trap_p (then_)
	      && !TREE_SIDE_EFFECTS (else_)
	      && !generic_expr_could_trap_p (else_))
	    return gimplify_pure_cond_expr (expr_p, pre_p);

	  tmp = create_tmp_var (type, "iftmp");
	  result = tmp;
	}

      /* Otherwise, only create and copy references to the values.  */
      else
	{
	  /* An lvalue is required: take the address of each arm and
	     dereference the chosen pointer afterwards.  */
	  type = build_pointer_type (type);

	  if (!VOID_TYPE_P (TREE_TYPE (then_)))
	    then_ = build_fold_addr_expr_loc (loc, then_);

	  if (!VOID_TYPE_P (TREE_TYPE (else_)))
	    else_ = build_fold_addr_expr_loc (loc, else_);

	  expr
	    = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);

	  tmp = create_tmp_var (type, "iftmp");
	  result = build_simple_mem_ref_loc (loc, tmp);
	}

      /* Build the new then clause, `tmp = then_;'.  But don't build the
	 assignment if the value is void; in C++ it can be if it's a throw.  */
      if (!VOID_TYPE_P (TREE_TYPE (then_)))
	TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);

      /* Similarly, build the new else clause, `tmp = else_;'.  */
      if (!VOID_TYPE_P (TREE_TYPE (else_)))
	TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);

      TREE_TYPE (expr) = void_type_node;
      recalculate_side_effects (expr);

      /* Move the COND_EXPR to the prequeue.  */
      gimplify_stmt (&expr, pre_p);

      *expr_p = result;
      return GS_ALL_DONE;
    }

  /* Remove any COMPOUND_EXPR so the following cases will be caught.  */
  STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
    gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);

  /* Make sure the condition has BOOLEAN_TYPE.  */
  TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

  /* Break apart && and || conditions.  */
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
      || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
    {
      expr = shortcut_cond_expr (expr);

      if (expr != *expr_p)
	{
	  *expr_p = expr;

	  /* We can't rely on gimplify_expr to re-gimplify the expanded
	     form properly, as cleanups might cause the target labels to be
	     wrapped in a TRY_FINALLY_EXPR.  To prevent that, we need to
	     set up a conditional context.  */
	  gimple_push_condition ();
	  gimplify_stmt (expr_p, &seq);
	  gimple_pop_condition (pre_p);
	  gimple_seq_add_seq (pre_p, seq);

	  return GS_ALL_DONE;
	}
    }

  /* Now do the normal gimplification.  */

  /* Gimplify condition.  */
  ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
		       fb_rvalue);
  if (ret == GS_ERROR)
    return GS_ERROR;
  gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);

  gimple_push_condition ();

  /* If an arm is just `goto label', reuse LABEL as the branch target of
     the GIMPLE_COND instead of emitting a fresh label and a jump.  */
  have_then_clause_p = have_else_clause_p = false;
  label_true = find_goto_label (TREE_OPERAND (expr, 1));
  if (label_true
      && DECL_CONTEXT (GOTO_DESTINATION (label_true)) == current_function_decl
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
	 have different locations, otherwise we end up with incorrect
	 location information on the branches.  */
      && (optimize
	  || !EXPR_HAS_LOCATION (expr)
	  || !rexpr_has_location (label_true)
	  || EXPR_LOCATION (expr) == rexpr_location (label_true)))
    {
      have_then_clause_p = true;
      label_true = GOTO_DESTINATION (label_true);
    }
  else
    label_true = create_artificial_label (UNKNOWN_LOCATION);
  label_false = find_goto_label (TREE_OPERAND (expr, 2));
  if (label_false
      && DECL_CONTEXT (GOTO_DESTINATION (label_false)) == current_function_decl
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
	 have different locations, otherwise we end up with incorrect
	 location information on the branches.  */
      && (optimize
	  || !EXPR_HAS_LOCATION (expr)
	  || !rexpr_has_location (label_false)
	  || EXPR_LOCATION (expr) == rexpr_location (label_false)))
    {
      have_else_clause_p = true;
      label_false = GOTO_DESTINATION (label_false);
    }
  else
    label_false = create_artificial_label (UNKNOWN_LOCATION);

  /* Emit the GIMPLE_COND itself, then try to fold it immediately.  */
  gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
				 &arm2);
  cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
				 label_false);
  gimple_set_no_warning (cond_stmt, TREE_NO_WARNING (COND_EXPR_COND (expr)));
  gimplify_seq_add_stmt (&seq, cond_stmt);
  gimple_stmt_iterator gsi = gsi_last (seq);
  maybe_fold_stmt (&gsi);

  label_cont = NULL_TREE;
  if (!have_then_clause_p)
    {
      /* For if (...) {} else { code; } put label_true after
	 the else block.  */
      if (TREE_OPERAND (expr, 1) == NULL_TREE
	  && !have_else_clause_p
	  && TREE_OPERAND (expr, 2) != NULL_TREE)
	label_cont = label_true;
      else
	{
	  gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
	  have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
	  /* For if (...) { code; } else {} or
	     if (...) { code; } else goto label; or
	     if (...) { code; return; } else { ... }
	     label_cont isn't needed.  */
	  if (!have_else_clause_p
	      && TREE_OPERAND (expr, 2) != NULL_TREE
	      && gimple_seq_may_fallthru (seq))
	    {
	      gimple *g;
	      label_cont = create_artificial_label (UNKNOWN_LOCATION);

	      g = gimple_build_goto (label_cont);

	      /* GIMPLE_COND's are very low level; they have embedded
		 gotos.  This particular embedded goto should not be marked
		 with the location of the original COND_EXPR, as it would
		 correspond to the COND_EXPR's condition, not the ELSE or the
		 THEN arms.  To avoid marking it with the wrong location, flag
		 it as "no location".  */
	      gimple_set_do_not_emit_location (g);

	      gimplify_seq_add_stmt (&seq, g);
	    }
	}
    }
  if (!have_else_clause_p)
    {
      gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
      have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
    }
  if (label_cont)
    gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));

  gimple_pop_condition (pre_p);
  gimple_seq_add_seq (pre_p, seq);

  if (ret == GS_ERROR)
    ; /* Do nothing.  */
  else if (have_then_clause_p || have_else_clause_p)
    ret = GS_ALL_DONE;
  else
    {
      /* Both arms are empty; replace the COND_EXPR with its predicate.  */
      expr = TREE_OPERAND (expr, 0);
      gimplify_stmt (&expr, pre_p);
    }

  *expr_p = NULL;
  return ret;
}
4182 
4183 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4184    to be marked addressable.
4185 
4186    We cannot rely on such an expression being directly markable if a temporary
4187    has been created by the gimplification.  In this case, we create another
4188    temporary and initialize it with a copy, which will become a store after we
4189    mark it addressable.  This can happen if the front-end passed us something
4190    that it could not mark addressable yet, like a Fortran pass-by-reference
4191    parameter (int) floatvar.  */
4192 
4193 static void
4194 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
4195 {
4196   while (handled_component_p (*expr_p))
4197     expr_p = &TREE_OPERAND (*expr_p, 0);
4198   if (is_gimple_reg (*expr_p))
4199     {
4200       /* Do not allow an SSA name as the temporary.  */
4201       tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL, false);
4202       DECL_GIMPLE_REG_P (var) = 0;
4203       *expr_p = var;
4204     }
4205 }
4206 
4207 /* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
4208    a call to __builtin_memcpy.  */
4209 
4210 static enum gimplify_status
4211 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
4212     				gimple_seq *seq_p)
4213 {
4214   tree t, to, to_ptr, from, from_ptr;
4215   gcall *gs;
4216   location_t loc = EXPR_LOCATION (*expr_p);
4217 
4218   to = TREE_OPERAND (*expr_p, 0);
4219   from = TREE_OPERAND (*expr_p, 1);
4220 
4221   /* Mark the RHS addressable.  Beware that it may not be possible to do so
4222      directly if a temporary has been created by the gimplification.  */
4223   prepare_gimple_addressable (&from, seq_p);
4224 
4225   mark_addressable (from);
4226   from_ptr = build_fold_addr_expr_loc (loc, from);
4227   gimplify_arg (&from_ptr, seq_p, loc);
4228 
4229   mark_addressable (to);
4230   to_ptr = build_fold_addr_expr_loc (loc, to);
4231   gimplify_arg (&to_ptr, seq_p, loc);
4232 
4233   t = builtin_decl_implicit (BUILT_IN_MEMCPY);
4234 
4235   gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
4236 
4237   if (want_value)
4238     {
4239       /* tmp = memcpy() */
4240       t = create_tmp_var (TREE_TYPE (to_ptr));
4241       gimple_call_set_lhs (gs, t);
4242       gimplify_seq_add_stmt (seq_p, gs);
4243 
4244       *expr_p = build_simple_mem_ref (t);
4245       return GS_ALL_DONE;
4246     }
4247 
4248   gimplify_seq_add_stmt (seq_p, gs);
4249   *expr_p = NULL;
4250   return GS_ALL_DONE;
4251 }
4252 
4253 /* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
4254    a call to __builtin_memset.  In this case we know that the RHS is
4255    a CONSTRUCTOR with an empty element list.  */
4256 
4257 static enum gimplify_status
4258 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
4259     				gimple_seq *seq_p)
4260 {
4261   tree t, from, to, to_ptr;
4262   gcall *gs;
4263   location_t loc = EXPR_LOCATION (*expr_p);
4264 
4265   /* Assert our assumptions, to abort instead of producing wrong code
4266      silently if they are not met.  Beware that the RHS CONSTRUCTOR might
4267      not be immediately exposed.  */
4268   from = TREE_OPERAND (*expr_p, 1);
4269   if (TREE_CODE (from) == WITH_SIZE_EXPR)
4270     from = TREE_OPERAND (from, 0);
4271 
4272   gcc_assert (TREE_CODE (from) == CONSTRUCTOR
4273 	      && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
4274 
4275   /* Now proceed.  */
4276   to = TREE_OPERAND (*expr_p, 0);
4277 
4278   to_ptr = build_fold_addr_expr_loc (loc, to);
4279   gimplify_arg (&to_ptr, seq_p, loc);
4280   t = builtin_decl_implicit (BUILT_IN_MEMSET);
4281 
4282   gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
4283 
4284   if (want_value)
4285     {
4286       /* tmp = memset() */
4287       t = create_tmp_var (TREE_TYPE (to_ptr));
4288       gimple_call_set_lhs (gs, t);
4289       gimplify_seq_add_stmt (seq_p, gs);
4290 
4291       *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
4292       return GS_ALL_DONE;
4293     }
4294 
4295   gimplify_seq_add_stmt (seq_p, gs);
4296   *expr_p = NULL;
4297   return GS_ALL_DONE;
4298 }
4299 
/* A subroutine of gimplify_init_ctor_preeval.  Called via walk_tree,
   determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
   assignment.  Return non-null if we detect a potential overlap.  */

/* Data passed through walk_tree to gimplify_init_ctor_preeval_1,
   describing the lhs of the assignment being analyzed.  */
struct gimplify_init_ctor_preeval_data
{
  /* The base decl of the lhs object.  May be NULL, in which case we
     have to assume the lhs is indirect.  */
  tree lhs_base_decl;

  /* The alias set of the lhs object.  */
  alias_set_type lhs_alias_set;
};
4313 
4314 static tree
4315 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
4316 {
4317   struct gimplify_init_ctor_preeval_data *data
4318     = (struct gimplify_init_ctor_preeval_data *) xdata;
4319   tree t = *tp;
4320 
4321   /* If we find the base object, obviously we have overlap.  */
4322   if (data->lhs_base_decl == t)
4323     return t;
4324 
4325   /* If the constructor component is indirect, determine if we have a
4326      potential overlap with the lhs.  The only bits of information we
4327      have to go on at this point are addressability and alias sets.  */
4328   if ((INDIRECT_REF_P (t)
4329        || TREE_CODE (t) == MEM_REF)
4330       && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4331       && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
4332     return t;
4333 
4334   /* If the constructor component is a call, determine if it can hide a
4335      potential overlap with the lhs through an INDIRECT_REF like above.
4336      ??? Ugh - this is completely broken.  In fact this whole analysis
4337      doesn't look conservative.  */
4338   if (TREE_CODE (t) == CALL_EXPR)
4339     {
4340       tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
4341 
4342       for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
4343 	if (POINTER_TYPE_P (TREE_VALUE (type))
4344 	    && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4345 	    && alias_sets_conflict_p (data->lhs_alias_set,
4346 				      get_alias_set
4347 				        (TREE_TYPE (TREE_VALUE (type)))))
4348 	  return t;
4349     }
4350 
4351   if (IS_TYPE_OR_DECL_P (t))
4352     *walk_subtrees = 0;
4353   return NULL;
4354 }
4355 
/* A subroutine of gimplify_init_constructor.  Pre-evaluate EXPR,
   force values that overlap with the lhs (as described by *DATA)
   into temporaries.  */

static void
gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			    struct gimplify_init_ctor_preeval_data *data)
{
  enum gimplify_status one;

  /* If the value is constant, then there's nothing to pre-evaluate.  */
  if (TREE_CONSTANT (*expr_p))
    {
      /* Ensure it does not have side effects, it might contain a reference to
	 the object we're initializing.  */
      gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
      return;
    }

  /* If the type has non-trivial constructors, we can't pre-evaluate.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
    return;

  /* Recurse for nested constructors.  */
  if (TREE_CODE (*expr_p) == CONSTRUCTOR)
    {
      unsigned HOST_WIDE_INT ix;
      constructor_elt *ce;
      vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);

      /* Pre-evaluate each element value in place.  */
      FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
	gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);

      return;
    }

  /* If this is a variable sized type, we must remember the size.  */
  maybe_with_size_expr (expr_p);

  /* Gimplify the constructor element to something appropriate for the rhs
     of a MODIFY_EXPR.  Given that we know the LHS is an aggregate, we know
     the gimplifier will consider this a store to memory.  Doing this
     gimplification now means that we won't have to deal with complicated
     language-specific trees, nor trees like SAVE_EXPR that can induce
     exponential search behavior.  */
  one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
  if (one == GS_ERROR)
    {
      *expr_p = NULL;
      return;
    }

  /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
     with the lhs, since "a = { .x=a }" doesn't make sense.  This will
     always be true for all scalars, since is_gimple_mem_rhs insists on a
     temporary variable for them.  */
  if (DECL_P (*expr_p))
    return;

  /* If this is of variable size, we have no choice but to assume it doesn't
     overlap since we can't make a temporary for it.  */
  if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
    return;

  /* Otherwise, we must search for overlap ...  */
  if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
    return;

  /* ... and if found, force the value into a temporary.  */
  *expr_p = get_formal_tmp_var (*expr_p, pre_p);
}
4427 
4428 /* A subroutine of gimplify_init_ctor_eval.  Create a loop for
4429    a RANGE_EXPR in a CONSTRUCTOR for an array.
4430 
4431       var = lower;
4432     loop_entry:
4433       object[var] = value;
4434       if (var == upper)
4435 	goto loop_exit;
4436       var = var + 1;
4437       goto loop_entry;
4438     loop_exit:
4439 
4440    We increment var _after_ the loop exit check because we might otherwise
4441    fail if upper == TYPE_MAX_VALUE (type for upper).
4442 
4443    Note that we never have to deal with SAVE_EXPRs here, because this has
4444    already been taken care of for us, in gimplify_init_ctor_preeval().  */
4445 
4446 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
4447 				     gimple_seq *, bool);
4448 
4449 static void
4450 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
4451 			       tree value, tree array_elt_type,
4452 			       gimple_seq *pre_p, bool cleared)
4453 {
4454   tree loop_entry_label, loop_exit_label, fall_thru_label;
4455   tree var, var_type, cref, tmp;
4456 
4457   loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
4458   loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
4459   fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
4460 
4461   /* Create and initialize the index variable.  */
4462   var_type = TREE_TYPE (upper);
4463   var = create_tmp_var (var_type);
4464   gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
4465 
4466   /* Add the loop entry label.  */
4467   gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
4468 
4469   /* Build the reference.  */
4470   cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4471 		 var, NULL_TREE, NULL_TREE);
4472 
4473   /* If we are a constructor, just call gimplify_init_ctor_eval to do
4474      the store.  Otherwise just assign value to the reference.  */
4475 
4476   if (TREE_CODE (value) == CONSTRUCTOR)
4477     /* NB we might have to call ourself recursively through
4478        gimplify_init_ctor_eval if the value is a constructor.  */
4479     gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4480 			     pre_p, cleared);
4481   else
4482     gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
4483 
4484   /* We exit the loop when the index var is equal to the upper bound.  */
4485   gimplify_seq_add_stmt (pre_p,
4486 			 gimple_build_cond (EQ_EXPR, var, upper,
4487 					    loop_exit_label, fall_thru_label));
4488 
4489   gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
4490 
4491   /* Otherwise, increment the index var...  */
4492   tmp = build2 (PLUS_EXPR, var_type, var,
4493 		fold_convert (var_type, integer_one_node));
4494   gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
4495 
4496   /* ...and jump back to the loop entry.  */
4497   gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
4498 
4499   /* Add the loop exit label.  */
4500   gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
4501 }
4502 
4503 /* Return true if FDECL is accessing a field that is zero sized.  */
4504 
4505 static bool
4506 zero_sized_field_decl (const_tree fdecl)
4507 {
4508   if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
4509       && integer_zerop (DECL_SIZE (fdecl)))
4510     return true;
4511   return false;
4512 }
4513 
4514 /* Return true if TYPE is zero sized.  */
4515 
4516 static bool
4517 zero_sized_type (const_tree type)
4518 {
4519   if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
4520       && integer_zerop (TYPE_SIZE (type)))
4521     return true;
4522   return false;
4523 }
4524 
/* A subroutine of gimplify_init_constructor.  Generate individual
   MODIFY_EXPRs for a CONSTRUCTOR.  OBJECT is the LHS against which the
   assignments should happen.  ELTS is the CONSTRUCTOR_ELTS of the
   CONSTRUCTOR.  CLEARED is true if the entire LHS object has been
   zeroed first.  */

static void
gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
			 gimple_seq *pre_p, bool cleared)
{
  tree array_elt_type = NULL;
  unsigned HOST_WIDE_INT ix;
  tree purpose, value;

  /* For arrays, remember the element type so ARRAY_REFs can be built.  */
  if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
    array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));

  FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
    {
      tree cref;

      /* NULL values are created above for gimplification errors.  */
      if (value == NULL)
	continue;

      /* If the object was already zeroed, zero initializers are no-ops.  */
      if (cleared && initializer_zerop (value))
	continue;

      /* ??? Here's to hoping the front end fills in all of the indices,
	 so we don't have to figure out what's missing ourselves.  */
      gcc_assert (purpose);

      /* Skip zero-sized fields, unless value has side-effects.  This can
	 happen with calls to functions returning a zero-sized type, which
	 we shouldn't discard.  As a number of downstream passes don't
	 expect sets of zero-sized fields, we rely on the gimplification of
	 the MODIFY_EXPR we make below to drop the assignment statement.  */
      if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
	continue;

      /* If we have a RANGE_EXPR, we have to build a loop to assign the
	 whole range.  */
      if (TREE_CODE (purpose) == RANGE_EXPR)
	{
	  tree lower = TREE_OPERAND (purpose, 0);
	  tree upper = TREE_OPERAND (purpose, 1);

	  /* If the lower bound is equal to upper, just treat it as if
	     upper was the index.  */
	  if (simple_cst_equal (lower, upper))
	    purpose = upper;
	  else
	    {
	      gimplify_init_ctor_eval_range (object, lower, upper, value,
					     array_elt_type, pre_p, cleared);
	      continue;
	    }
	}

      /* Build the reference to the element being initialized.  */
      if (array_elt_type)
	{
	  /* Do not use bitsizetype for ARRAY_REF indices.  */
	  if (TYPE_DOMAIN (TREE_TYPE (object)))
	    purpose
	      = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
			      purpose);
	  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
			 purpose, NULL_TREE, NULL_TREE);
	}
      else
	{
	  gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
	  cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
			 unshare_expr (object), purpose, NULL_TREE);
	}

      /* Recurse for nested (non-vector) constructors, otherwise emit
	 a plain element initialization.  */
      if (TREE_CODE (value) == CONSTRUCTOR
	  && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
	gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
				 pre_p, cleared);
      else
	{
	  tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
	  gimplify_and_add (init, pre_p);
	  ggc_free (init);
	}
    }
}
4613 
4614 /* Return the appropriate RHS predicate for this LHS.  */
4615 
4616 gimple_predicate
4617 rhs_predicate_for (tree lhs)
4618 {
4619   if (is_gimple_reg (lhs))
4620     return is_gimple_reg_rhs_or_call;
4621   else
4622     return is_gimple_mem_rhs_or_call;
4623 }
4624 
4625 /* Return the initial guess for an appropriate RHS predicate for this LHS,
4626    before the LHS has been gimplified.  */
4627 
4628 static gimple_predicate
4629 initial_rhs_predicate_for (tree lhs)
4630 {
4631   if (is_gimple_reg_type (TREE_TYPE (lhs)))
4632     return is_gimple_reg_rhs_or_call;
4633   else
4634     return is_gimple_mem_rhs_or_call;
4635 }
4636 
4637 /* Gimplify a C99 compound literal expression.  This just means adding
4638    the DECL_EXPR before the current statement and using its anonymous
4639    decl instead.  */
4640 
static enum gimplify_status
gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
				bool (*gimple_test_f) (tree),
				fallback_t fallback)
{
  /* The COMPOUND_LITERAL_EXPR wraps a DECL_EXPR for an anonymous decl
     whose DECL_INITIAL carries the literal's initializer.  */
  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
  tree decl = DECL_EXPR_DECL (decl_s);
  tree init = DECL_INITIAL (decl);
  /* Mark the decl as addressable if the compound literal
     expression is addressable now, otherwise it is marked too late
     after we gimplify the initialization expression.  */
  if (TREE_ADDRESSABLE (*expr_p))
    TREE_ADDRESSABLE (decl) = 1;
  /* Otherwise, if we don't need an lvalue and have a literal directly
     substitute it.  Check if it matches the gimple predicate, as
     otherwise we'd generate a new temporary, and we can as well just
     use the decl we already have.  */
  else if (!TREE_ADDRESSABLE (decl)
	   && !TREE_THIS_VOLATILE (decl)
	   && init
	   && (fallback & fb_lvalue) == 0
	   && gimple_test_f (init))
    {
      *expr_p = init;
      return GS_OK;
    }

  /* Preliminarily mark non-addressed complex variables as eligible
     for promotion to gimple registers.  We'll transform their uses
     as we find them.  */
  if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
      && !TREE_THIS_VOLATILE (decl)
      && !needs_to_live_in_memory (decl))
    DECL_GIMPLE_REG_P (decl) = 1;

  /* If the decl is not addressable, then it is being used in some
     expression or on the right hand side of a statement, and it can
     be put into a readonly data section.  */
  if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
    TREE_READONLY (decl) = 1;

  /* This decl isn't mentioned in the enclosing block, so add it to the
     list of temps.  FIXME it seems a bit of a kludge to say that
     anonymous artificial vars aren't pushed, but everything else is.  */
  if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
    gimple_add_tmp_var (decl);

  /* Emit the DECL_EXPR (and thereby the initialization) before the
     current statement, then use the anonymous decl in its place.  */
  gimplify_and_add (decl_s, pre_p);
  *expr_p = decl;
  return GS_OK;
}
4693 
4694 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
4695    return a new CONSTRUCTOR if something changed.  */
4696 
static tree
optimize_compound_literals_in_ctor (tree orig_ctor)
{
  tree ctor = orig_ctor;
  vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
  unsigned int idx, num = vec_safe_length (elts);

  for (idx = 0; idx < num; idx++)
    {
      tree value = (*elts)[idx].value;
      tree newval = value;
      /* Recurse into nested CONSTRUCTORs so embedded compound literals
	 at any depth are handled.  */
      if (TREE_CODE (value) == CONSTRUCTOR)
	newval = optimize_compound_literals_in_ctor (value);
      else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
	{
	  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
	  tree decl = DECL_EXPR_DECL (decl_s);
	  tree init = DECL_INITIAL (decl);

	  /* The literal can be replaced by its initializer only if the
	     address of neither the literal nor its decl is needed.  */
	  if (!TREE_ADDRESSABLE (value)
	      && !TREE_ADDRESSABLE (decl)
	      && init
	      && TREE_CODE (init) == CONSTRUCTOR)
	    newval = optimize_compound_literals_in_ctor (init);
	}
      if (newval == value)
	continue;

      /* Copy-on-write: clone ORIG_CTOR and its element vector only the
	 first time an element actually changes, so callers sharing the
	 original tree are not disturbed.  */
      if (ctor == orig_ctor)
	{
	  ctor = copy_node (orig_ctor);
	  CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
	  elts = CONSTRUCTOR_ELTS (ctor);
	}
      (*elts)[idx].value = newval;
    }
  /* Returns ORIG_CTOR unchanged if no element was rewritten.  */
  return ctor;
}
4735 
4736 /* A subroutine of gimplify_modify_expr.  Break out elements of a
4737    CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
4738 
4739    Note that we still need to clear any elements that don't have explicit
4740    initializers, so if not all elements are initialized we keep the
4741    original MODIFY_EXPR, we just remove all of the constructor elements.
4742 
4743    If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
4744    GS_ERROR if we would have to create a temporary when gimplifying
4745    this constructor.  Otherwise, return GS_OK.
4746 
4747    If NOTIFY_TEMP_CREATION is false, just do the gimplification.  */
4748 
static enum gimplify_status
gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			   bool want_value, bool notify_temp_creation)
{
  tree object, ctor, type;
  enum gimplify_status ret;
  vec<constructor_elt, va_gc> *elts;

  gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);

  /* In query-only mode (notify_temp_creation) nothing may be emitted,
     so the LHS is left ungimplified.  */
  if (!notify_temp_creation)
    {
      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
	return ret;
    }

  /* Simplify away compound literals inside the CONSTRUCTOR before
     inspecting it; the result is written back into the MODIFY_EXPR.  */
  object = TREE_OPERAND (*expr_p, 0);
  ctor = TREE_OPERAND (*expr_p, 1)
    = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
  type = TREE_TYPE (ctor);
  elts = CONSTRUCTOR_ELTS (ctor);
  ret = GS_ALL_DONE;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case ARRAY_TYPE:
      {
	struct gimplify_init_ctor_preeval_data preeval_data;
	HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
	HOST_WIDE_INT num_unique_nonzero_elements;
	bool cleared, complete_p, valid_const_initializer;
	/* Use readonly data for initializers of this or smaller size
	   regardless of the num_nonzero_elements / num_unique_nonzero_elements
	   ratio.  */
	const HOST_WIDE_INT min_unique_size = 64;
	/* If num_nonzero_elements / num_unique_nonzero_elements ratio
	   is smaller than this, use readonly data.  */
	const int unique_nonzero_ratio = 8;

	/* Aggregate types must lower constructors to initialization of
	   individual elements.  The exception is that a CONSTRUCTOR node
	   with no elements indicates zero-initialization of the whole.  */
	if (vec_safe_is_empty (elts))
	  {
	    if (notify_temp_creation)
	      return GS_OK;
	    break;
	  }

	/* Fetch information about the constructor to direct later processing.
	   We might want to make static versions of it in various cases, and
	   can only do so if it known to be a valid constant initializer.  */
	valid_const_initializer
	  = categorize_ctor_elements (ctor, &num_nonzero_elements,
				      &num_unique_nonzero_elements,
				      &num_ctor_elements, &complete_p);

	/* If a const aggregate variable is being initialized, then it
	   should never be a lose to promote the variable to be static.  */
	if (valid_const_initializer
	    && num_nonzero_elements > 1
	    && TREE_READONLY (object)
	    && VAR_P (object)
	    && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object))
	    /* For ctors that have many repeated nonzero elements
	       represented through RANGE_EXPRs, prefer initializing
	       those through runtime loops over copies of large amounts
	       of data from readonly data section.  */
	    && (num_unique_nonzero_elements
		> num_nonzero_elements / unique_nonzero_ratio
		|| ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
		    <= (unsigned HOST_WIDE_INT) min_unique_size)))
	  {
	    if (notify_temp_creation)
	      return GS_ERROR;
	    DECL_INITIAL (object) = ctor;
	    TREE_STATIC (object) = 1;
	    if (!DECL_NAME (object))
	      DECL_NAME (object) = create_tmp_var_name ("C");
	    walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);

	    /* ??? C++ doesn't automatically append a .<number> to the
	       assembler name, and even when it does, it looks at FE private
	       data structures to figure out what that number should be,
	       which are not set for this variable.  I suppose this is
	       important for local statics for inline functions, which aren't
	       "local" in the object file sense.  So in order to get a unique
	       TU-local symbol, we must invoke the lhd version now.  */
	    lhd_set_decl_assembler_name (object);

	    *expr_p = NULL_TREE;
	    break;
	  }

	/* If there are "lots" of initialized elements, even discounting
	   those that are not address constants (and thus *must* be
	   computed at runtime), then partition the constructor into
	   constant and non-constant parts.  Block copy the constant
	   parts in, then generate code for the non-constant parts.  */
	/* TODO.  There's code in cp/typeck.c to do this.  */

	if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
	  /* store_constructor will ignore the clearing of variable-sized
	     objects.  Initializers for such objects must explicitly set
	     every field that needs to be set.  */
	  cleared = false;
	else if (!complete_p)
	  /* If the constructor isn't complete, clear the whole object
	     beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.

	     ??? This ought not to be needed.  For any element not present
	     in the initializer, we should simply set them to zero.  Except
	     we'd need to *find* the elements that are not present, and that
	     requires trickery to avoid quadratic compile-time behavior in
	     large cases or excessive memory use in small cases.  */
	  cleared = !CONSTRUCTOR_NO_CLEARING (ctor);
	else if (num_ctor_elements - num_nonzero_elements
		 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
		 && num_nonzero_elements < num_ctor_elements / 4)
	  /* If there are "lots" of zeros, it's more efficient to clear
	     the memory and then set the nonzero elements.  */
	  cleared = true;
	else
	  cleared = false;

	/* If there are "lots" of initialized elements, and all of them
	   are valid address constants, then the entire initializer can
	   be dropped to memory, and then memcpy'd out.  Don't do this
	   for sparse arrays, though, as it's more efficient to follow
	   the standard CONSTRUCTOR behavior of memset followed by
	   individual element initialization.  Also don't do this for small
	   all-zero initializers (which aren't big enough to merit
	   clearing), and don't try to make bitwise copies of
	   TREE_ADDRESSABLE types.

	   We cannot apply such transformation when compiling chkp static
	   initializer because creation of initializer image in the memory
	   will require static initialization of bounds for it.  It should
	   result in another gimplification of similar initializer and we
	   may fall into infinite loop.  */
	if (valid_const_initializer
	    && !(cleared || num_nonzero_elements == 0)
	    && !TREE_ADDRESSABLE (type)
	    && (!current_function_decl
		|| !lookup_attribute ("chkp ctor",
				      DECL_ATTRIBUTES (current_function_decl))))
	  {
	    HOST_WIDE_INT size = int_size_in_bytes (type);
	    unsigned int align;

	    /* ??? We can still get unbounded array types, at least
	       from the C++ front end.  This seems wrong, but attempt
	       to work around it for now.  */
	    if (size < 0)
	      {
		size = int_size_in_bytes (TREE_TYPE (object));
		if (size >= 0)
		  TREE_TYPE (ctor) = type = TREE_TYPE (object);
	      }

	    /* Find the maximum alignment we can assume for the object.  */
	    /* ??? Make use of DECL_OFFSET_ALIGN.  */
	    if (DECL_P (object))
	      align = DECL_ALIGN (object);
	    else
	      align = TYPE_ALIGN (type);

	    /* Do a block move either if the size is so small as to make
	       each individual move a sub-unit move on average, or if it
	       is so large as to make individual moves inefficient.  */
	    if (size > 0
		&& num_nonzero_elements > 1
		/* For ctors that have many repeated nonzero elements
		   represented through RANGE_EXPRs, prefer initializing
		   those through runtime loops over copies of large amounts
		   of data from readonly data section.  */
		&& (num_unique_nonzero_elements
		    > num_nonzero_elements / unique_nonzero_ratio
		    || size <= min_unique_size)
		&& (size < num_nonzero_elements
		    || !can_move_by_pieces (size, align)))
	      {
		if (notify_temp_creation)
		  return GS_ERROR;

		walk_tree (&ctor, force_labels_r, NULL, NULL);
		ctor = tree_output_constant_def (ctor);
		if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
		  ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
		TREE_OPERAND (*expr_p, 1) = ctor;

		/* This is no longer an assignment of a CONSTRUCTOR, but
		   we still may have processing to do on the LHS.  So
		   pretend we didn't do anything here to let that happen.  */
		return GS_UNHANDLED;
	      }
	  }

	/* If the target is volatile, we have non-zero elements and more than
	   one field to assign, initialize the target from a temporary.  */
	if (TREE_THIS_VOLATILE (object)
	    && !TREE_ADDRESSABLE (type)
	    && (num_nonzero_elements > 0 || !cleared)
	    && vec_safe_length (elts) > 1)
	  {
	    tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
	    TREE_OPERAND (*expr_p, 0) = temp;
	    *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
			      *expr_p,
			      build2 (MODIFY_EXPR, void_type_node,
				      object, temp));
	    return GS_OK;
	  }

	if (notify_temp_creation)
	  return GS_OK;

	/* If there are nonzero elements and if needed, pre-evaluate to capture
	   elements overlapping with the lhs into temporaries.  We must do this
	   before clearing to fetch the values before they are zeroed-out.  */
	if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
	  {
	    preeval_data.lhs_base_decl = get_base_address (object);
	    if (!DECL_P (preeval_data.lhs_base_decl))
	      preeval_data.lhs_base_decl = NULL;
	    preeval_data.lhs_alias_set = get_alias_set (object);

	    gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
					pre_p, post_p, &preeval_data);
	  }

	/* Remember whether the ctor had side effects before it is
	   potentially zapped below.  */
	bool ctor_has_side_effects_p
	  = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));

	if (cleared)
	  {
	    /* Zap the CONSTRUCTOR element list, which simplifies this case.
	       Note that we still have to gimplify, in order to handle the
	       case of variable sized types.  Avoid shared tree structures.  */
	    CONSTRUCTOR_ELTS (ctor) = NULL;
	    TREE_SIDE_EFFECTS (ctor) = 0;
	    object = unshare_expr (object);
	    gimplify_stmt (expr_p, pre_p);
	  }

	/* If we have not block cleared the object, or if there are nonzero
	   elements in the constructor, or if the constructor has side effects,
	   add assignments to the individual scalar fields of the object.  */
	if (!cleared
	    || num_nonzero_elements > 0
	    || ctor_has_side_effects_p)
	  gimplify_init_ctor_eval (object, elts, pre_p, cleared);

	*expr_p = NULL_TREE;
      }
      break;

    case COMPLEX_TYPE:
      {
	tree r, i;

	if (notify_temp_creation)
	  return GS_OK;

	/* Extract the real and imaginary parts out of the ctor.
	   A missing part defaults to zero.  */
	gcc_assert (elts->length () == 2);
	r = (*elts)[0].value;
	i = (*elts)[1].value;
	if (r == NULL || i == NULL)
	  {
	    tree zero = build_zero_cst (TREE_TYPE (type));
	    if (r == NULL)
	      r = zero;
	    if (i == NULL)
	      i = zero;
	  }

	/* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
	   represent creation of a complex value.  */
	if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
	  {
	    ctor = build_complex (type, r, i);
	    TREE_OPERAND (*expr_p, 1) = ctor;
	  }
	else
	  {
	    ctor = build2 (COMPLEX_EXPR, type, r, i);
	    TREE_OPERAND (*expr_p, 1) = ctor;
	    ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
				 pre_p,
				 post_p,
				 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
				 fb_rvalue);
	  }
      }
      break;

    case VECTOR_TYPE:
      {
	unsigned HOST_WIDE_INT ix;
	constructor_elt *ce;

	if (notify_temp_creation)
	  return GS_OK;

	/* Go ahead and simplify constant constructors to VECTOR_CST.  */
	if (TREE_CONSTANT (ctor))
	  {
	    bool constant_p = true;
	    tree value;

	    /* Even when ctor is constant, it might contain non-*_CST
	       elements, such as addresses or trapping values like
	       1.0/0.0 - 1.0/0.0.  Such expressions don't belong
	       in VECTOR_CST nodes.  */
	    FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
	      if (!CONSTANT_CLASS_P (value))
		{
		  constant_p = false;
		  break;
		}

	    if (constant_p)
	      {
		TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
		break;
	      }

	    TREE_CONSTANT (ctor) = 0;
	  }

	/* Vector types use CONSTRUCTOR all the way through gimple
	   compilation as a general initializer.  */
	FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
	  {
	    enum gimplify_status tret;
	    tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
				  fb_rvalue);
	    if (tret == GS_ERROR)
	      ret = GS_ERROR;
	    else if (TREE_STATIC (ctor)
		     && !initializer_constant_valid_p (ce->value,
						       TREE_TYPE (ce->value)))
	      TREE_STATIC (ctor) = 0;
	  }
	if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
	  TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
      }
      break;

    default:
      /* So how did we get a CONSTRUCTOR for a scalar type?  */
      gcc_unreachable ();
    }

  if (ret == GS_ERROR)
    return GS_ERROR;
  /* If we have gimplified both sides of the initializer but have
     not emitted an assignment, do so now.  */
  if (*expr_p)
    {
      tree lhs = TREE_OPERAND (*expr_p, 0);
      tree rhs = TREE_OPERAND (*expr_p, 1);
      if (want_value && object == lhs)
	lhs = unshare_expr (lhs);
      gassign *init = gimple_build_assign (lhs, rhs);
      gimplify_seq_add_stmt (pre_p, init);
    }
  if (want_value)
    {
      *expr_p = object;
      return GS_OK;
    }
  else
    {
      *expr_p = NULL;
      return GS_ALL_DONE;
    }
}
5133 
5134 /* Given a pointer value OP0, return a simplified version of an
5135    indirection through OP0, or NULL_TREE if no simplification is
5136    possible.  This may only be applied to a rhs of an expression.
5137    Note that the resulting type may be different from the type pointed
5138    to in the sense that it is still compatible from the langhooks
5139    point of view. */
5140 
5141 static tree
5142 gimple_fold_indirect_ref_rhs (tree t)
5143 {
5144   return gimple_fold_indirect_ref (t);
5145 }
5146 
5147 /* Subroutine of gimplify_modify_expr to do simplifications of
5148    MODIFY_EXPRs based on the code of the RHS.  We loop for as long as
5149    something changes.  */
5150 
static enum gimplify_status
gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
			  gimple_seq *pre_p, gimple_seq *post_p,
			  bool want_value)
{
  enum gimplify_status ret = GS_UNHANDLED;
  /* CHANGED drives the fixpoint loop: each simplification may expose
     another, so iterate until the RHS stabilizes.  */
  bool changed;

  do
    {
      changed = false;
      switch (TREE_CODE (*from_p))
	{
	case VAR_DECL:
	  /* If we're assigning from a read-only variable initialized with
	     a constructor, do the direct assignment from the constructor,
	     but only if neither source nor target are volatile since this
	     latter assignment might end up being done on a per-field basis.  */
	  if (DECL_INITIAL (*from_p)
	      && TREE_READONLY (*from_p)
	      && !TREE_THIS_VOLATILE (*from_p)
	      && !TREE_THIS_VOLATILE (*to_p)
	      && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
	    {
	      tree old_from = *from_p;
	      enum gimplify_status subret;

	      /* Move the constructor into the RHS.  */
	      *from_p = unshare_expr (DECL_INITIAL (*from_p));

	      /* Let's see if gimplify_init_constructor will need to put
		 it in memory.  */
	      subret = gimplify_init_constructor (expr_p, NULL, NULL,
						  false, true);
	      if (subret == GS_ERROR)
		{
		  /* If so, revert the change.  */
		  *from_p = old_from;
		}
	      else
		{
		  ret = GS_OK;
		  changed = true;
		}
	    }
	  break;
	case INDIRECT_REF:
	  {
	    /* If we have code like

	     *(const A*)(A*)&x

	     where the type of "x" is a (possibly cv-qualified variant
	     of "A"), treat the entire expression as identical to "x".
	     This kind of code arises in C++ when an object is bound
	     to a const reference, and if "x" is a TARGET_EXPR we want
	     to take advantage of the optimization below.  */
	    bool volatile_p = TREE_THIS_VOLATILE (*from_p);
	    tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
	    if (t)
	      {
		/* Preserve the volatility of the original reference on
		   the folded replacement.  */
		if (TREE_THIS_VOLATILE (t) != volatile_p)
		  {
		    if (DECL_P (t))
		      t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
						    build_fold_addr_expr (t));
		    if (REFERENCE_CLASS_P (t))
		      TREE_THIS_VOLATILE (t) = volatile_p;
		  }
		*from_p = t;
		ret = GS_OK;
		changed = true;
	      }
	    break;
	  }

	case TARGET_EXPR:
	  {
	    /* If we are initializing something from a TARGET_EXPR, strip the
	       TARGET_EXPR and initialize it directly, if possible.  This can't
	       be done if the initializer is void, since that implies that the
	       temporary is set in some non-trivial way.

	       ??? What about code that pulls out the temp and uses it
	       elsewhere? I think that such code never uses the TARGET_EXPR as
	       an initializer.  If I'm wrong, we'll die because the temp won't
	       have any RTL.  In that case, I guess we'll need to replace
	       references somehow.  */
	    tree init = TARGET_EXPR_INITIAL (*from_p);

	    if (init
		&& (TREE_CODE (*expr_p) != MODIFY_EXPR
		    || !TARGET_EXPR_NO_ELIDE (*from_p))
		&& !VOID_TYPE_P (TREE_TYPE (init)))
	      {
		*from_p = init;
		ret = GS_OK;
		changed = true;
	      }
	  }
	  break;

	case COMPOUND_EXPR:
	  /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
	     caught.  */
	  gimplify_compound_expr (from_p, pre_p, true);
	  ret = GS_OK;
	  changed = true;
	  break;

	case CONSTRUCTOR:
	  /* If we already made some changes, let the front end have a
	     crack at this before we break it down.  */
	  if (ret != GS_UNHANDLED)
	    break;
	  /* If we're initializing from a CONSTRUCTOR, break this into
	     individual MODIFY_EXPRs.  */
	  return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
					    false);

	case COND_EXPR:
	  /* If we're assigning to a non-register type, push the assignment
	     down into the branches.  This is mandatory for ADDRESSABLE types,
	     since we cannot generate temporaries for such, but it saves a
	     copy in other cases as well.  */
	  if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
	    {
	      /* This code should mirror the code in gimplify_cond_expr. */
	      enum tree_code code = TREE_CODE (*expr_p);
	      tree cond = *from_p;
	      tree result = *to_p;

	      ret = gimplify_expr (&result, pre_p, post_p,
				   is_gimple_lvalue, fb_lvalue);
	      if (ret != GS_ERROR)
		ret = GS_OK;

	      /* If we are going to write RESULT more than once, clear
		 TREE_READONLY flag, otherwise we might incorrectly promote
		 the variable to static const and initialize it at compile
		 time in one of the branches.  */
	      if (VAR_P (result)
		  && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
		  && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
		TREE_READONLY (result) = 0;
	      if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
		TREE_OPERAND (cond, 1)
		  = build2 (code, void_type_node, result,
			    TREE_OPERAND (cond, 1));
	      if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
		TREE_OPERAND (cond, 2)
		  = build2 (code, void_type_node, unshare_expr (result),
			    TREE_OPERAND (cond, 2));

	      TREE_TYPE (cond) = void_type_node;
	      recalculate_side_effects (cond);

	      if (want_value)
		{
		  gimplify_and_add (cond, pre_p);
		  *expr_p = unshare_expr (result);
		}
	      else
		*expr_p = cond;
	      return ret;
	    }
	  break;

	case CALL_EXPR:
	  /* For calls that return in memory, give *to_p as the CALL_EXPR's
	     return slot so that we don't generate a temporary.  */
	  if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
	      && aggregate_value_p (*from_p, *from_p))
	    {
	      bool use_target;

	      if (!(rhs_predicate_for (*to_p))(*from_p))
		/* If we need a temporary, *to_p isn't accurate.  */
		use_target = false;
	      /* It's OK to use the return slot directly unless it's an NRV. */
	      else if (TREE_CODE (*to_p) == RESULT_DECL
		       && DECL_NAME (*to_p) == NULL_TREE
		       && needs_to_live_in_memory (*to_p))
		use_target = true;
	      else if (is_gimple_reg_type (TREE_TYPE (*to_p))
		       || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
		/* Don't force regs into memory.  */
		use_target = false;
	      else if (TREE_CODE (*expr_p) == INIT_EXPR)
		/* It's OK to use the target directly if it's being
		   initialized. */
		use_target = true;
	      else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
		       != INTEGER_CST)
		/* Always use the target and thus RSO for variable-sized types.
		   GIMPLE cannot deal with a variable-sized assignment
		   embedded in a call statement.  */
		use_target = true;
	      else if (TREE_CODE (*to_p) != SSA_NAME
		      && (!is_gimple_variable (*to_p)
			  || needs_to_live_in_memory (*to_p)))
		/* Don't use the original target if it's already addressable;
		   if its address escapes, and the called function uses the
		   NRV optimization, a conforming program could see *to_p
		   change before the called function returns; see c++/19317.
		   When optimizing, the return_slot pass marks more functions
		   as safe after we have escape info.  */
		use_target = false;
	      else
		use_target = true;

	      if (use_target)
		{
		  CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
		  mark_addressable (*to_p);
		}
	    }
	  break;

	case WITH_SIZE_EXPR:
	  /* Likewise for calls that return an aggregate of non-constant size,
	     since we would not be able to generate a temporary at all.  */
	  if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
	    {
	      *from_p = TREE_OPERAND (*from_p, 0);
	      /* We don't change ret in this case because the
		 WITH_SIZE_EXPR might have been added in
		 gimplify_modify_expr, so returning GS_OK would lead to an
		 infinite loop.  */
	      changed = true;
	    }
	  break;

	  /* If we're initializing from a container, push the initialization
	     inside it.  */
	case CLEANUP_POINT_EXPR:
	case BIND_EXPR:
	case STATEMENT_LIST:
	  {
	    tree wrap = *from_p;
	    tree t;

	    ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
				 fb_lvalue);
	    if (ret != GS_ERROR)
	      ret = GS_OK;

	    t = voidify_wrapper_expr (wrap, *expr_p);
	    gcc_assert (t == *expr_p);

	    if (want_value)
	      {
		gimplify_and_add (wrap, pre_p);
		*expr_p = unshare_expr (*to_p);
	      }
	    else
	      *expr_p = wrap;
	    return GS_OK;
	  }

	case COMPOUND_LITERAL_EXPR:
	  {
	    tree complit = TREE_OPERAND (*expr_p, 1);
	    tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
	    tree decl = DECL_EXPR_DECL (decl_s);
	    tree init = DECL_INITIAL (decl);

	    /* struct T x = (struct T) { 0, 1, 2 } can be optimized
	       into struct T x = { 0, 1, 2 } if the address of the
	       compound literal has never been taken.  */
	    if (!TREE_ADDRESSABLE (complit)
		&& !TREE_ADDRESSABLE (decl)
		&& init)
	      {
		*expr_p = copy_node (*expr_p);
		TREE_OPERAND (*expr_p, 1) = init;
		return GS_OK;
	      }
	  }
	  /* FALLTHRU — falls into the default case, which only breaks,
	     so the fall-through is harmless.  */

	default:
	  break;
	}
    }
  while (changed);

  return ret;
}
5439 
5440 
5441 /* Return true if T looks like a valid GIMPLE statement.  */
5442 
5443 static bool
5444 is_gimple_stmt (tree t)
5445 {
5446   const enum tree_code code = TREE_CODE (t);
5447 
5448   switch (code)
5449     {
5450     case NOP_EXPR:
5451       /* The only valid NOP_EXPR is the empty statement.  */
5452       return IS_EMPTY_STMT (t);
5453 
5454     case BIND_EXPR:
5455     case COND_EXPR:
5456       /* These are only valid if they're void.  */
5457       return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
5458 
5459     case SWITCH_EXPR:
5460     case GOTO_EXPR:
5461     case RETURN_EXPR:
5462     case LABEL_EXPR:
5463     case CASE_LABEL_EXPR:
5464     case TRY_CATCH_EXPR:
5465     case TRY_FINALLY_EXPR:
5466     case EH_FILTER_EXPR:
5467     case CATCH_EXPR:
5468     case ASM_EXPR:
5469     case STATEMENT_LIST:
5470     case OACC_PARALLEL:
5471     case OACC_KERNELS:
5472     case OACC_DATA:
5473     case OACC_HOST_DATA:
5474     case OACC_DECLARE:
5475     case OACC_UPDATE:
5476     case OACC_ENTER_DATA:
5477     case OACC_EXIT_DATA:
5478     case OACC_CACHE:
5479     case OMP_PARALLEL:
5480     case OMP_FOR:
5481     case OMP_SIMD:
5482     case OMP_DISTRIBUTE:
5483     case OACC_LOOP:
5484     case OMP_SECTIONS:
5485     case OMP_SECTION:
5486     case OMP_SINGLE:
5487     case OMP_MASTER:
5488     case OMP_TASKGROUP:
5489     case OMP_ORDERED:
5490     case OMP_CRITICAL:
5491     case OMP_TASK:
5492     case OMP_TARGET:
5493     case OMP_TARGET_DATA:
5494     case OMP_TARGET_UPDATE:
5495     case OMP_TARGET_ENTER_DATA:
5496     case OMP_TARGET_EXIT_DATA:
5497     case OMP_TASKLOOP:
5498     case OMP_TEAMS:
5499       /* These are always void.  */
5500       return true;
5501 
5502     case CALL_EXPR:
5503     case MODIFY_EXPR:
5504     case PREDICT_EXPR:
5505       /* These are valid regardless of their type.  */
5506       return true;
5507 
5508     default:
5509       return false;
5510     }
5511 }
5512 
5513 
5514 /* Promote partial stores to COMPLEX variables to total stores.  *EXPR_P is
5515    a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
5516    DECL_GIMPLE_REG_P set.
5517 
5518    IMPORTANT NOTE: This promotion is performed by introducing a load of the
5519    other, unmodified part of the complex object just before the total store.
5520    As a consequence, if the object is still uninitialized, an undefined value
5521    will be loaded into a register, which may result in a spurious exception
5522    if the register is floating-point and the value happens to be a signaling
5523    NaN for example.  Then the fully-fledged complex operations lowering pass
5524    followed by a DCE pass are necessary in order to fix things up.  */
5525 
5526 static enum gimplify_status
5527 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
5528                                    bool want_value)
5529 {
5530   enum tree_code code, ocode;
5531   tree lhs, rhs, new_rhs, other, realpart, imagpart;
5532 
5533   lhs = TREE_OPERAND (*expr_p, 0);
5534   rhs = TREE_OPERAND (*expr_p, 1);
5535   code = TREE_CODE (lhs);
5536   lhs = TREE_OPERAND (lhs, 0);
5537 
5538   ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
5539   other = build1 (ocode, TREE_TYPE (rhs), lhs);
5540   TREE_NO_WARNING (other) = 1;
5541   other = get_formal_tmp_var (other, pre_p);
5542 
5543   realpart = code == REALPART_EXPR ? rhs : other;
5544   imagpart = code == REALPART_EXPR ? other : rhs;
5545 
5546   if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
5547     new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
5548   else
5549     new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
5550 
5551   gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
5552   *expr_p = (want_value) ? rhs : NULL_TREE;
5553 
5554   return GS_ALL_DONE;
5555 }
5556 
5557 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
5558 
5559       modify_expr
5560 	      : varname '=' rhs
5561 	      | '*' ID '=' rhs
5562 
5563     PRE_P points to the list where side effects that must happen before
5564 	*EXPR_P should be stored.
5565 
5566     POST_P points to the list where side effects that must happen after
5567 	*EXPR_P should be stored.
5568 
5569     WANT_VALUE is nonzero iff we want to use the value of this expression
5570 	in another expression.  */
5571 
static enum gimplify_status
gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
		      bool want_value)
{
  tree *from_p = &TREE_OPERAND (*expr_p, 1);
  tree *to_p = &TREE_OPERAND (*expr_p, 0);
  enum gimplify_status ret = GS_UNHANDLED;
  gimple *assign;
  location_t loc = EXPR_LOCATION (*expr_p);
  gimple_stmt_iterator gsi;

  gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
	      || TREE_CODE (*expr_p) == INIT_EXPR);

  /* Trying to simplify a clobber using normal logic doesn't work,
     so handle it here.  */
  if (TREE_CLOBBER_P (*from_p))
    {
      ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
	return ret;
      /* Clobbers are statements; their value is never used and the LHS
	 must have gimplified to a variable or memory reference.  */
      gcc_assert (!want_value
		  && (VAR_P (*to_p) || TREE_CODE (*to_p) == MEM_REF));
      gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
      *expr_p = NULL;
      return GS_ALL_DONE;
    }

  /* Insert pointer conversions required by the middle-end that are not
     required by the frontend.  This fixes middle-end type checking for
     for example gcc.dg/redecl-6.c.  */
  if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
    {
      STRIP_USELESS_TYPE_CONVERSION (*from_p);
      if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
	*from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
    }

  /* See if any simplifications can be done based on what the RHS is.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* For zero sized types only gimplify the left hand side and right hand
     side as statements and throw away the assignment.  Do this after
     gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
     types properly.  */
  if (zero_sized_type (TREE_TYPE (*from_p))
      && !want_value
      /* Don't do this for calls that return addressable types, expand_call
	 relies on those having a lhs.  */
      && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
	   && TREE_CODE (*from_p) == CALL_EXPR))
    {
      gimplify_stmt (from_p, pre_p);
      gimplify_stmt (to_p, pre_p);
      *expr_p = NULL_TREE;
      return GS_ALL_DONE;
    }

  /* If the value being copied is of variable width, compute the length
     of the copy into a WITH_SIZE_EXPR.   Note that we need to do this
     before gimplifying any of the operands so that we can resolve any
     PLACEHOLDER_EXPRs in the size.  Also note that the RTL expander uses
     the size of the expression to be copied, not of the destination, so
     that is what we must do here.  */
  maybe_with_size_expr (from_p);

  /* As a special case, we have to temporarily allow for assignments
     with a CALL_EXPR on the RHS.  Since in GIMPLE a function call is
     a toplevel statement, when gimplifying the GENERIC expression
     MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
     GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.

     Instead, we need to create the tuple GIMPLE_CALL <a, foo>.  To
     prevent gimplify_expr from trying to create a new temporary for
     foo's LHS, we tell it that it should only gimplify until it
     reaches the CALL_EXPR.  On return from gimplify_expr, the newly
     created GIMPLE_CALL <foo> will be the last statement in *PRE_P
     and all we need to do here is set 'a' to be its LHS.  */

  /* Gimplify the RHS first for C++17 and bug 71104.  */
  gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
  ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Then gimplify the LHS.  */
  /* If we gimplified the RHS to a CALL_EXPR and that call may return
     twice we have to make sure to gimplify into non-SSA as otherwise
     the abnormal edge added later will make those defs not dominate
     their uses.
     ???  Technically this applies only to the registers used in the
     resulting non-register *TO_P.  */
  bool saved_into_ssa = gimplify_ctxp->into_ssa;
  if (saved_into_ssa
      && TREE_CODE (*from_p) == CALL_EXPR
      && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
    gimplify_ctxp->into_ssa = false;
  ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  gimplify_ctxp->into_ssa = saved_into_ssa;
  if (ret == GS_ERROR)
    return ret;

  /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
     guess for the predicate was wrong.  */
  gimple_predicate final_pred = rhs_predicate_for (*to_p);
  if (final_pred != initial_pred)
    {
      ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
      if (ret == GS_ERROR)
	return ret;
    }

  /* In case of va_arg internal fn wrappped in a WITH_SIZE_EXPR, add the type
     size as argument to the call.  */
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
    {
      tree call = TREE_OPERAND (*from_p, 0);
      tree vlasize = TREE_OPERAND (*from_p, 1);

      if (TREE_CODE (call) == CALL_EXPR
	  && CALL_EXPR_IFN (call) == IFN_VA_ARG)
	{
	  /* Rebuild the IFN_VA_ARG call with the size appended as an
	     extra trailing argument.  */
	  int nargs = call_expr_nargs (call);
	  tree type = TREE_TYPE (call);
	  tree ap = CALL_EXPR_ARG (call, 0);
	  tree tag = CALL_EXPR_ARG (call, 1);
	  tree aptag = CALL_EXPR_ARG (call, 2);
	  tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
						       IFN_VA_ARG, type,
						       nargs + 1, ap, tag,
						       aptag, vlasize);
	  TREE_OPERAND (*from_p, 0) = newcall;
	}
    }

  /* Now see if the above changed *from_p to something we handle specially.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* If we've got a variable sized assignment between two lvalues (i.e. does
     not involve a call), then we can make things a bit more straightforward
     by converting the assignment to memcpy or memset.  */
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
    {
      tree from = TREE_OPERAND (*from_p, 0);
      tree size = TREE_OPERAND (*from_p, 1);

      if (TREE_CODE (from) == CONSTRUCTOR)
	return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);

      if (is_gimple_addressable (from))
	{
	  *from_p = from;
	  return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
	      					 pre_p);
	}
    }

  /* Transform partial stores to non-addressable complex variables into
     total stores.  This allows us to use real instead of virtual operands
     for these variables, which improves optimization.  */
  if ((TREE_CODE (*to_p) == REALPART_EXPR
       || TREE_CODE (*to_p) == IMAGPART_EXPR)
      && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
    return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);

  /* Try to alleviate the effects of the gimplification creating artificial
     temporaries (see for example is_gimple_reg_rhs) on the debug info, but
     make sure not to create DECL_DEBUG_EXPR links across functions.  */
  if (!gimplify_ctxp->into_ssa
      && VAR_P (*from_p)
      && DECL_IGNORED_P (*from_p)
      && DECL_P (*to_p)
      && !DECL_IGNORED_P (*to_p)
      && decl_function_context (*to_p) == current_function_decl
      && decl_function_context (*from_p) == current_function_decl)
    {
      /* Give the artificial temporary a name derived from the user
	 variable so debug output is readable.  */
      if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
	DECL_NAME (*from_p)
	  = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
      DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
      SET_DECL_DEBUG_EXPR (*from_p, *to_p);
   }

  /* If the result is used and the LHS is volatile, evaluate the RHS into
     a temporary; the value of the whole expression is taken from it below
     instead of re-reading the volatile LHS.  */
  if (want_value && TREE_THIS_VOLATILE (*to_p))
    *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);

  if (TREE_CODE (*from_p) == CALL_EXPR)
    {
      /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
	 instead of a GIMPLE_ASSIGN.  */
      gcall *call_stmt;
      if (CALL_EXPR_FN (*from_p) == NULL_TREE)
	{
	  /* Gimplify internal functions created in the FEs.  */
	  int nargs = call_expr_nargs (*from_p), i;
	  enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
	  auto_vec<tree> vargs (nargs);

	  for (i = 0; i < nargs; i++)
	    {
	      gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
			    EXPR_LOCATION (*from_p));
	      vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
	    }
	  call_stmt = gimple_build_call_internal_vec (ifn, vargs);
	  gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
	  gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
	}
      else
	{
	  tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
	  CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
	  STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
	  tree fndecl = get_callee_fndecl (*from_p);
	  /* Three-argument __builtin_expect calls are lowered to the
	     internal function so the extra argument survives into GIMPLE.  */
	  if (fndecl
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
	      && call_expr_nargs (*from_p) == 3)
	    call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
						    CALL_EXPR_ARG (*from_p, 0),
						    CALL_EXPR_ARG (*from_p, 1),
						    CALL_EXPR_ARG (*from_p, 2));
	  else
	    {
	      call_stmt = gimple_build_call_from_tree (*from_p, fnptrtype);
	    }
	}
      notice_special_calls (call_stmt);
      if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
	gimple_call_set_lhs (call_stmt, *to_p);
      else if (TREE_CODE (*to_p) == SSA_NAME)
	/* The above is somewhat premature, avoid ICEing later for a
	   SSA name w/o a definition.  We may have uses in the GIMPLE IL.
	   ???  This doesn't make it a default-def.  */
	SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();

      assign = call_stmt;
    }
  else
    {
      assign = gimple_build_assign (*to_p, *from_p);
      gimple_set_location (assign, EXPR_LOCATION (*expr_p));
      /* Propagate warning suppression from a comparison RHS so later
	 passes do not re-warn about it.  */
      if (COMPARISON_CLASS_P (*from_p))
	gimple_set_no_warning (assign, TREE_NO_WARNING (*from_p));
    }

  if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
    {
      /* We should have got an SSA name from the start.  */
      gcc_assert (TREE_CODE (*to_p) == SSA_NAME
		  || ! gimple_in_ssa_p (cfun));
    }

  /* Emit the new statement and attempt to fold it in place.  */
  gimplify_seq_add_stmt (pre_p, assign);
  gsi = gsi_last (*pre_p);
  maybe_fold_stmt (&gsi);

  if (want_value)
    {
      /* Yield the stored value: for a volatile LHS use the temporary set
	 up above rather than re-reading the LHS.  */
      *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
      return GS_OK;
    }
  else
    *expr_p = NULL;

  return GS_ALL_DONE;
}
5845 
5846 /* Gimplify a comparison between two variable-sized objects.  Do this
5847    with a call to BUILT_IN_MEMCMP.  */
5848 
5849 static enum gimplify_status
5850 gimplify_variable_sized_compare (tree *expr_p)
5851 {
5852   location_t loc = EXPR_LOCATION (*expr_p);
5853   tree op0 = TREE_OPERAND (*expr_p, 0);
5854   tree op1 = TREE_OPERAND (*expr_p, 1);
5855   tree t, arg, dest, src, expr;
5856 
5857   arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
5858   arg = unshare_expr (arg);
5859   arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
5860   src = build_fold_addr_expr_loc (loc, op1);
5861   dest = build_fold_addr_expr_loc (loc, op0);
5862   t = builtin_decl_implicit (BUILT_IN_MEMCMP);
5863   t = build_call_expr_loc (loc, t, 3, dest, src, arg);
5864 
5865   expr
5866     = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
5867   SET_EXPR_LOCATION (expr, loc);
5868   *expr_p = expr;
5869 
5870   return GS_OK;
5871 }
5872 
5873 /* Gimplify a comparison between two aggregate objects of integral scalar
5874    mode as a comparison between the bitwise equivalent scalar values.  */
5875 
5876 static enum gimplify_status
5877 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
5878 {
5879   location_t loc = EXPR_LOCATION (*expr_p);
5880   tree op0 = TREE_OPERAND (*expr_p, 0);
5881   tree op1 = TREE_OPERAND (*expr_p, 1);
5882 
5883   tree type = TREE_TYPE (op0);
5884   tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
5885 
5886   op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
5887   op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
5888 
5889   *expr_p
5890     = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
5891 
5892   return GS_OK;
5893 }
5894 
5895 /* Gimplify an expression sequence.  This function gimplifies each
5896    expression and rewrites the original expression with the last
5897    expression of the sequence in GIMPLE form.
5898 
5899    PRE_P points to the list where the side effects for all the
5900        expressions in the sequence will be emitted.
5901 
5902    WANT_VALUE is true when the result of the last COMPOUND_EXPR is used.  */
5903 
5904 static enum gimplify_status
5905 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
5906 {
5907   tree t = *expr_p;
5908 
5909   do
5910     {
5911       tree *sub_p = &TREE_OPERAND (t, 0);
5912 
5913       if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
5914 	gimplify_compound_expr (sub_p, pre_p, false);
5915       else
5916 	gimplify_stmt (sub_p, pre_p);
5917 
5918       t = TREE_OPERAND (t, 1);
5919     }
5920   while (TREE_CODE (t) == COMPOUND_EXPR);
5921 
5922   *expr_p = t;
5923   if (want_value)
5924     return GS_OK;
5925   else
5926     {
5927       gimplify_stmt (expr_p, pre_p);
5928       return GS_ALL_DONE;
5929     }
5930 }
5931 
5932 /* Gimplify a SAVE_EXPR node.  EXPR_P points to the expression to
5933    gimplify.  After gimplification, EXPR_P will point to a new temporary
5934    that holds the original value of the SAVE_EXPR node.
5935 
5936    PRE_P points to the list where side effects that must happen before
5937    *EXPR_P should be stored.  */
5938 
5939 static enum gimplify_status
5940 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5941 {
5942   enum gimplify_status ret = GS_ALL_DONE;
5943   tree val;
5944 
5945   gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
5946   val = TREE_OPERAND (*expr_p, 0);
5947 
5948   /* If the SAVE_EXPR has not been resolved, then evaluate it once.  */
5949   if (!SAVE_EXPR_RESOLVED_P (*expr_p))
5950     {
5951       /* The operand may be a void-valued expression.  It is
5952 	 being executed only for its side-effects.  */
5953       if (TREE_TYPE (val) == void_type_node)
5954 	{
5955 	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5956 			       is_gimple_stmt, fb_none);
5957 	  val = NULL;
5958 	}
5959       else
5960 	/* The temporary may not be an SSA name as later abnormal and EH
5961 	   control flow may invalidate use/def domination.  When in SSA
5962 	   form then assume there are no such issues and SAVE_EXPRs only
5963 	   appear via GENERIC foldings.  */
5964 	val = get_initialized_tmp_var (val, pre_p, post_p,
5965 				       gimple_in_ssa_p (cfun));
5966 
5967       TREE_OPERAND (*expr_p, 0) = val;
5968       SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
5969     }
5970 
5971   *expr_p = val;
5972 
5973   return ret;
5974 }
5975 
5976 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
5977 
5978       unary_expr
5979 	      : ...
5980 	      | '&' varname
5981 	      ...
5982 
5983     PRE_P points to the list where side effects that must happen before
5984 	*EXPR_P should be stored.
5985 
5986     POST_P points to the list where side effects that must happen after
5987 	*EXPR_P should be stored.  */
5988 
static enum gimplify_status
gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr = *expr_p;
  tree op0 = TREE_OPERAND (expr, 0);
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  switch (TREE_CODE (op0))
    {
    case INDIRECT_REF:
    do_indirect_ref:
      /* Check if we are dealing with an expression of the form '&*ptr'.
	 While the front end folds away '&*ptr' into 'ptr', these
	 expressions may be generated internally by the compiler (e.g.,
	 builtins like __builtin_va_end).  */
      /* Caution: the silent array decomposition semantics we allow for
	 ADDR_EXPR means we can't always discard the pair.  */
      /* Gimplification of the ADDR_EXPR operand may drop
	 cv-qualification conversions, so make sure we add them if
	 needed.  */
      {
	tree op00 = TREE_OPERAND (op0, 0);
	tree t_expr = TREE_TYPE (expr);
	tree t_op00 = TREE_TYPE (op00);

        if (!useless_type_conversion_p (t_expr, t_op00))
	  op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
        *expr_p = op00;
        ret = GS_OK;
      }
      break;

    case VIEW_CONVERT_EXPR:
      /* Take the address of our operand and then convert it to the type of
	 this ADDR_EXPR.

	 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
	 all clear.  The impact of this transformation is even less clear.  */

      /* If the operand is a useless conversion, look through it.  Doing so
	 guarantees that the ADDR_EXPR and its operand will remain of the
	 same type.  */
      if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
	op0 = TREE_OPERAND (op0, 0);

      *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
				  build_fold_addr_expr_loc (loc,
							TREE_OPERAND (op0, 0)));
      ret = GS_OK;
      break;

    case MEM_REF:
      /* &MEM[p, 0] is equivalent to &*p, so share the INDIRECT_REF
	 handling above.  */
      if (integer_zerop (TREE_OPERAND (op0, 1)))
	goto do_indirect_ref;

      /* fall through */

    default:
      /* If we see a call to a declared builtin or see its address
	 being taken (we can unify those cases here) then we can mark
	 the builtin for implicit generation by GCC.  */
      if (TREE_CODE (op0) == FUNCTION_DECL
	  && DECL_BUILT_IN_CLASS (op0) == BUILT_IN_NORMAL
	  && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
	set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);

      /* We use fb_either here because the C frontend sometimes takes
	 the address of a call that returns a struct; see
	 gcc.dg/c99-array-lval-1.c.  The gimplifier will correctly make
	 the implied temporary explicit.  */

      /* Make the operand addressable.  */
      ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
			   is_gimple_addressable, fb_either);
      if (ret == GS_ERROR)
	break;

      /* Then mark it.  Beware that it may not be possible to do so directly
	 if a temporary has been created by the gimplification.  */
      prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);

      op0 = TREE_OPERAND (expr, 0);

      /* For various reasons, the gimplification of the expression
	 may have made a new INDIRECT_REF.  */
      if (TREE_CODE (op0) == INDIRECT_REF
         || (TREE_CODE (op0) == MEM_REF
             && integer_zerop (TREE_OPERAND (op0, 1))))
	goto do_indirect_ref;

      mark_addressable (TREE_OPERAND (expr, 0));

      /* The FEs may end up building ADDR_EXPRs early on a decl with
	 an incomplete type.  Re-build ADDR_EXPRs in canonical form
	 here.  */
      if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
	*expr_p = build_fold_addr_expr (op0);

      /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly.  */
      recompute_tree_invariant_for_addr_expr (*expr_p);

      /* If we re-built the ADDR_EXPR add a conversion to the original type
         if required.  */
      if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
	*expr_p = fold_convert (TREE_TYPE (expr), *expr_p);

      break;
    }

  return ret;
}
6101 
6102 /* Gimplify the operands of an ASM_EXPR.  Input operands should be a gimple
6103    value; output operands should be a gimple lvalue.  */
6104 
6105 static enum gimplify_status
6106 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6107 {
6108   tree expr;
6109   int noutputs;
6110   const char **oconstraints;
6111   int i;
6112   tree link;
6113   const char *constraint;
6114   bool allows_mem, allows_reg, is_inout;
6115   enum gimplify_status ret, tret;
6116   gasm *stmt;
6117   vec<tree, va_gc> *inputs;
6118   vec<tree, va_gc> *outputs;
6119   vec<tree, va_gc> *clobbers;
6120   vec<tree, va_gc> *labels;
6121   tree link_next;
6122 
6123   expr = *expr_p;
6124   noutputs = list_length (ASM_OUTPUTS (expr));
6125   oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
6126 
6127   inputs = NULL;
6128   outputs = NULL;
6129   clobbers = NULL;
6130   labels = NULL;
6131 
6132   ret = GS_ALL_DONE;
6133   link_next = NULL_TREE;
6134   for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
6135     {
6136       bool ok;
6137       size_t constraint_len;
6138 
6139       link_next = TREE_CHAIN (link);
6140 
6141       oconstraints[i]
6142 	= constraint
6143 	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6144       constraint_len = strlen (constraint);
6145       if (constraint_len == 0)
6146         continue;
6147 
6148       ok = parse_output_constraint (&constraint, i, 0, 0,
6149 				    &allows_mem, &allows_reg, &is_inout);
6150       if (!ok)
6151 	{
6152 	  ret = GS_ERROR;
6153 	  is_inout = false;
6154 	}
6155 
6156       /* If we can't make copies, we can only accept memory.  */
6157       if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
6158 	{
6159 	  if (allows_mem)
6160 	    allows_reg = 0;
6161 	  else
6162 	    {
6163 	      error ("impossible constraint in %<asm%>");
6164 	      error ("non-memory output %d must stay in memory", i);
6165 	      return GS_ERROR;
6166 	    }
6167 	}
6168 
6169       if (!allows_reg && allows_mem)
6170 	mark_addressable (TREE_VALUE (link));
6171 
6172       tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6173 			    is_inout ? is_gimple_min_lval : is_gimple_lvalue,
6174 			    fb_lvalue | fb_mayfail);
6175       if (tret == GS_ERROR)
6176 	{
6177 	  error ("invalid lvalue in asm output %d", i);
6178 	  ret = tret;
6179 	}
6180 
6181       /* If the constraint does not allow memory make sure we gimplify
6182          it to a register if it is not already but its base is.  This
6183 	 happens for complex and vector components.  */
6184       if (!allows_mem)
6185 	{
6186 	  tree op = TREE_VALUE (link);
6187 	  if (! is_gimple_val (op)
6188 	      && is_gimple_reg_type (TREE_TYPE (op))
6189 	      && is_gimple_reg (get_base_address (op)))
6190 	    {
6191 	      tree tem = create_tmp_reg (TREE_TYPE (op));
6192 	      tree ass;
6193 	      if (is_inout)
6194 		{
6195 		  ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
6196 				tem, unshare_expr (op));
6197 		  gimplify_and_add (ass, pre_p);
6198 		}
6199 	      ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
6200 	      gimplify_and_add (ass, post_p);
6201 
6202 	      TREE_VALUE (link) = tem;
6203 	      tret = GS_OK;
6204 	    }
6205 	}
6206 
6207       vec_safe_push (outputs, link);
6208       TREE_CHAIN (link) = NULL_TREE;
6209 
6210       if (is_inout)
6211 	{
6212 	  /* An input/output operand.  To give the optimizers more
6213 	     flexibility, split it into separate input and output
6214  	     operands.  */
6215 	  tree input;
6216 	  /* Buffer big enough to format a 32-bit UINT_MAX into.  */
6217 	  char buf[11];
6218 
6219 	  /* Turn the in/out constraint into an output constraint.  */
6220 	  char *p = xstrdup (constraint);
6221 	  p[0] = '=';
6222 	  TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
6223 
6224 	  /* And add a matching input constraint.  */
6225 	  if (allows_reg)
6226 	    {
6227 	      sprintf (buf, "%u", i);
6228 
6229 	      /* If there are multiple alternatives in the constraint,
6230 		 handle each of them individually.  Those that allow register
6231 		 will be replaced with operand number, the others will stay
6232 		 unchanged.  */
6233 	      if (strchr (p, ',') != NULL)
6234 		{
6235 		  size_t len = 0, buflen = strlen (buf);
6236 		  char *beg, *end, *str, *dst;
6237 
6238 		  for (beg = p + 1;;)
6239 		    {
6240 		      end = strchr (beg, ',');
6241 		      if (end == NULL)
6242 			end = strchr (beg, '\0');
6243 		      if ((size_t) (end - beg) < buflen)
6244 			len += buflen + 1;
6245 		      else
6246 			len += end - beg + 1;
6247 		      if (*end)
6248 			beg = end + 1;
6249 		      else
6250 			break;
6251 		    }
6252 
6253 		  str = (char *) alloca (len);
6254 		  for (beg = p + 1, dst = str;;)
6255 		    {
6256 		      const char *tem;
6257 		      bool mem_p, reg_p, inout_p;
6258 
6259 		      end = strchr (beg, ',');
6260 		      if (end)
6261 			*end = '\0';
6262 		      beg[-1] = '=';
6263 		      tem = beg - 1;
6264 		      parse_output_constraint (&tem, i, 0, 0,
6265 					       &mem_p, &reg_p, &inout_p);
6266 		      if (dst != str)
6267 			*dst++ = ',';
6268 		      if (reg_p)
6269 			{
6270 			  memcpy (dst, buf, buflen);
6271 			  dst += buflen;
6272 			}
6273 		      else
6274 			{
6275 			  if (end)
6276 			    len = end - beg;
6277 			  else
6278 			    len = strlen (beg);
6279 			  memcpy (dst, beg, len);
6280 			  dst += len;
6281 			}
6282 		      if (end)
6283 			beg = end + 1;
6284 		      else
6285 			break;
6286 		    }
6287 		  *dst = '\0';
6288 		  input = build_string (dst - str, str);
6289 		}
6290 	      else
6291 		input = build_string (strlen (buf), buf);
6292 	    }
6293 	  else
6294 	    input = build_string (constraint_len - 1, constraint + 1);
6295 
6296 	  free (p);
6297 
6298 	  input = build_tree_list (build_tree_list (NULL_TREE, input),
6299 				   unshare_expr (TREE_VALUE (link)));
6300 	  ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
6301 	}
6302     }
6303 
6304   link_next = NULL_TREE;
6305   for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
6306     {
6307       link_next = TREE_CHAIN (link);
6308       constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6309       parse_input_constraint (&constraint, 0, 0, noutputs, 0,
6310 			      oconstraints, &allows_mem, &allows_reg);
6311 
6312       /* If we can't make copies, we can only accept memory.  */
6313       if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
6314 	{
6315 	  if (allows_mem)
6316 	    allows_reg = 0;
6317 	  else
6318 	    {
6319 	      error ("impossible constraint in %<asm%>");
6320 	      error ("non-memory input %d must stay in memory", i);
6321 	      return GS_ERROR;
6322 	    }
6323 	}
6324 
6325       /* If the operand is a memory input, it should be an lvalue.  */
6326       if (!allows_reg && allows_mem)
6327 	{
6328 	  tree inputv = TREE_VALUE (link);
6329 	  STRIP_NOPS (inputv);
6330 	  if (TREE_CODE (inputv) == PREDECREMENT_EXPR
6331 	      || TREE_CODE (inputv) == PREINCREMENT_EXPR
6332 	      || TREE_CODE (inputv) == POSTDECREMENT_EXPR
6333 	      || TREE_CODE (inputv) == POSTINCREMENT_EXPR
6334 	      || TREE_CODE (inputv) == MODIFY_EXPR)
6335 	    TREE_VALUE (link) = error_mark_node;
6336 	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6337 				is_gimple_lvalue, fb_lvalue | fb_mayfail);
6338 	  if (tret != GS_ERROR)
6339 	    {
6340 	      /* Unlike output operands, memory inputs are not guaranteed
6341 		 to be lvalues by the FE, and while the expressions are
6342 		 marked addressable there, if it is e.g. a statement
6343 		 expression, temporaries in it might not end up being
6344 		 addressable.  They might be already used in the IL and thus
6345 		 it is too late to make them addressable now though.  */
6346 	      tree x = TREE_VALUE (link);
6347 	      while (handled_component_p (x))
6348 		x = TREE_OPERAND (x, 0);
6349 	      if (TREE_CODE (x) == MEM_REF
6350 		  && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
6351 		x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
6352 	      if ((VAR_P (x)
6353 		   || TREE_CODE (x) == PARM_DECL
6354 		   || TREE_CODE (x) == RESULT_DECL)
6355 		  && !TREE_ADDRESSABLE (x)
6356 		  && is_gimple_reg (x))
6357 		{
6358 		  warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
6359 					       input_location), 0,
6360 			      "memory input %d is not directly addressable",
6361 			      i);
6362 		  prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
6363 		}
6364 	    }
6365 	  mark_addressable (TREE_VALUE (link));
6366 	  if (tret == GS_ERROR)
6367 	    {
6368 	      error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
6369 			"memory input %d is not directly addressable", i);
6370 	      ret = tret;
6371 	    }
6372 	}
6373       else
6374 	{
6375 	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6376 				is_gimple_asm_val, fb_rvalue);
6377 	  if (tret == GS_ERROR)
6378 	    ret = tret;
6379 	}
6380 
6381       TREE_CHAIN (link) = NULL_TREE;
6382       vec_safe_push (inputs, link);
6383     }
6384 
6385   link_next = NULL_TREE;
6386   for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
6387     {
6388       link_next = TREE_CHAIN (link);
6389       TREE_CHAIN (link) = NULL_TREE;
6390       vec_safe_push (clobbers, link);
6391     }
6392 
6393   link_next = NULL_TREE;
6394   for (link = ASM_LABELS (expr); link; ++i, link = link_next)
6395     {
6396       link_next = TREE_CHAIN (link);
6397       TREE_CHAIN (link) = NULL_TREE;
6398       vec_safe_push (labels, link);
6399     }
6400 
6401   /* Do not add ASMs with errors to the gimple IL stream.  */
6402   if (ret != GS_ERROR)
6403     {
6404       stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
6405 				   inputs, outputs, clobbers, labels);
6406 
6407       gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
6408       gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
6409       gimple_asm_set_inline (stmt, ASM_INLINE_P (expr));
6410 
6411       gimplify_seq_add_stmt (pre_p, stmt);
6412     }
6413 
6414   return ret;
6415 }
6416 
6417 /* Gimplify a CLEANUP_POINT_EXPR.  Currently this works by adding
6418    GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6419    gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
6420    return to this function.
6421 
6422    FIXME should we complexify the prequeue handling instead?  Or use flags
6423    for all the cleanups and let the optimizer tighten them up?  The current
6424    code seems pretty fragile; it will break on a cleanup within any
6425    non-conditional nesting.  But any such nesting would be broken, anyway;
6426    we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
6427    and continues out of it.  We can do that at the RTL level, though, so
6428    having an optimizer to tighten up try/finally regions would be a Good
6429    Thing.  */
6430 
static enum gimplify_status
gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
{
  gimple_stmt_iterator iter;
  gimple_seq body_sequence = NULL;

  /* If the cleanup point wraps a value-producing expression, voidify it;
     TEMP then names the temporary that will carry the value out.  */
  tree temp = voidify_wrapper_expr (*expr_p, NULL);

  /* We only care about the number of conditions between the innermost
     CLEANUP_POINT_EXPR and the cleanup.  So save and reset the count and
     any cleanups collected outside the CLEANUP_POINT_EXPR.  */
  int old_conds = gimplify_ctxp->conditions;
  gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
  bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
  gimplify_ctxp->conditions = 0;
  gimplify_ctxp->conditional_cleanups = NULL;
  gimplify_ctxp->in_cleanup_point_expr = true;

  /* Gimplify the wrapped statement; cleanups encountered inside show up
     in BODY_SEQUENCE as GIMPLE_WITH_CLEANUP_EXPR markers.  */
  gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);

  gimplify_ctxp->conditions = old_conds;
  gimplify_ctxp->conditional_cleanups = old_cleanups;
  gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;

  /* Convert each WCE marker into a GIMPLE_TRY protecting everything that
     follows it in the sequence.  */
  for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
    {
      gimple *wce = gsi_stmt (iter);

      if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
	{
	  if (gsi_one_before_end_p (iter))
	    {
	      /* The WCE is the last statement, so there is nothing left to
		 protect: emit the cleanup inline (or drop it entirely if
		 it was to run only on the exception path).
                 Note that gsi_insert_seq_before and gsi_remove do not
                 scan operands, unlike some other sequence mutators.  */
	      if (!gimple_wce_cleanup_eh_only (wce))
		gsi_insert_seq_before_without_update (&iter,
						      gimple_wce_cleanup (wce),
						      GSI_SAME_STMT);
	      gsi_remove (&iter, true);
	      break;
	    }
	  else
	    {
	      gtry *gtry;
	      gimple_seq seq;
	      enum gimple_try_flags kind;

	      /* EH-only cleanups become try/catch; ordinary cleanups
		 become try/finally.  */
	      if (gimple_wce_cleanup_eh_only (wce))
		kind = GIMPLE_TRY_CATCH;
	      else
		kind = GIMPLE_TRY_FINALLY;
	      seq = gsi_split_seq_after (iter);

	      gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
              /* Do not use gsi_replace here, as it may scan operands.
                 We want to do a simple structural modification only.  */
	      gsi_set_stmt (&iter, gtry);
	      /* Continue scanning inside the protected body, so nested
		 WCEs are converted as well.  */
	      iter = gsi_start (gtry->eval);
	    }
	}
      else
	gsi_next (&iter);
    }

  gimplify_seq_add_seq (pre_p, body_sequence);
  /* Return the value-carrying temporary if voidification created one;
     otherwise the construct was a plain statement and we are done.  */
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }
  else
    {
      *expr_p = NULL;
      return GS_ALL_DONE;
    }
}
6507 
6508 /* Insert a cleanup marker for gimplify_cleanup_point_expr.  CLEANUP
6509    is the cleanup action required.  EH_ONLY is true if the cleanup should
6510    only be executed if an exception is thrown, not on normal exit.
6511    If FORCE_UNCOND is true perform the cleanup unconditionally;  this is
6512    only valid for clobbers.  */
6513 
static void
gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
		     bool force_uncond = false)
{
  gimple *wce;
  gimple_seq cleanup_stmts = NULL;

  /* Errors can result in improperly nested cleanups.  Which results in
     confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR.  */
  if (seen_error ())
    return;

  if (gimple_conditional_context ())
    {
      /* If we're in a conditional context, this is more complex.  We only
	 want to run the cleanup if we actually ran the initialization that
	 necessitates it, but we want to run it after the end of the
	 conditional context.  So we wrap the try/finally around the
	 condition and use a flag to determine whether or not to actually
	 run the destructor.  Thus

	   test ? f(A()) : 0

	 becomes (approximately)

	   flag = 0;
	   try {
	     if (test) { A::A(temp); flag = 1; val = f(temp); }
	     else { val = 0; }
	   } finally {
	     if (flag) A::~A(temp);
	   }
	   val
      */
      if (force_uncond)
	{
	  /* FORCE_UNCOND (clobbers only): the cleanup runs regardless of
	     whether the initialization executed, so no guard flag is
	     needed — queue the WCE on the conditional-cleanups list
	     directly.  */
	  gimplify_stmt (&cleanup, &cleanup_stmts);
	  wce = gimple_build_wce (cleanup_stmts);
	  gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
	}
      else
	{
	  /* Guarded form: FLAG starts false, is set true right before the
	     initialization runs (FTRUE goes to PRE_P), and the cleanup is
	     wrapped in "if (flag) ..." so it only fires when the
	     initialization actually happened.  */
	  tree flag = create_tmp_var (boolean_type_node, "cleanup");
	  gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
	  gassign *ftrue = gimple_build_assign (flag, boolean_true_node);

	  cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
	  gimplify_stmt (&cleanup, &cleanup_stmts);
	  wce = gimple_build_wce (cleanup_stmts);

	  gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
	  gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
	  gimplify_seq_add_stmt (pre_p, ftrue);

	  /* Because of this manipulation, and the EH edges that jump
	     threading cannot redirect, the temporary (VAR) will appear
	     to be used uninitialized.  Don't warn.  */
	  TREE_NO_WARNING (var) = 1;
	}
    }
  else
    {
      /* Unconditional context: just emit the WCE marker into PRE_P;
	 gimplify_cleanup_point_expr will turn it into a try/finally
	 (or try/catch when EH_ONLY).  */
      gimplify_stmt (&cleanup, &cleanup_stmts);
      wce = gimple_build_wce (cleanup_stmts);
      gimple_wce_set_cleanup_eh_only (wce, eh_only);
      gimplify_seq_add_stmt (pre_p, wce);
    }
}
6582 
6583 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR.  */
6584 
static enum gimplify_status
gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree targ = *expr_p;
  tree temp = TARGET_EXPR_SLOT (targ);
  tree init = TARGET_EXPR_INITIAL (targ);
  enum gimplify_status ret;

  /* Where to insert ASAN unpoisoning, recorded before the initializer is
     gimplified (see below).  */
  bool unpoison_empty_seq = false;
  gimple_stmt_iterator unpoison_it;

  if (init)
    {
      tree cleanup = NULL_TREE;

      /* TARGET_EXPR temps aren't part of the enclosing block, so add it
	 to the temps list.  Handle also variable length TARGET_EXPRs.  */
      if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
	    gimplify_type_sizes (TREE_TYPE (temp), pre_p);
	  gimplify_vla_decl (temp, pre_p);
	}
      else
	{
	  /* Save location where we need to place unpoisoning.  It's possible
	     that a variable will be converted to needs_to_live_in_memory.  */
	  unpoison_it = gsi_last (*pre_p);
	  unpoison_empty_seq = gsi_end_p (unpoison_it);

	  gimple_add_tmp_var (temp);
	}

      /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
	 expression is supposed to initialize the slot.  */
      if (VOID_TYPE_P (TREE_TYPE (init)))
	ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
      else
	{
	  /* Otherwise build "temp = init" and gimplify that instead;
	     the INIT_EXPR node itself can be freed once consumed.  */
	  tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
	  init = init_expr;
	  ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
	  init = NULL;
	  ggc_free (init_expr);
	}
      if (ret == GS_ERROR)
	{
	  /* PR c++/28266 Make sure this is expanded only once. */
	  TARGET_EXPR_INITIAL (targ) = NULL_TREE;
	  return GS_ERROR;
	}
      if (init)
	gimplify_and_add (init, pre_p);

      /* If needed, push the cleanup for the temp.  */
      if (TARGET_EXPR_CLEANUP (targ))
	{
	  if (CLEANUP_EH_ONLY (targ))
	    gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
				 CLEANUP_EH_ONLY (targ), pre_p);
	  else
	    /* Deferred so it is pushed after the clobber/ASAN cleanups
	       below.  */
	    cleanup = TARGET_EXPR_CLEANUP (targ);
	}

      /* Add a clobber for the temporary going out of scope, like
	 gimplify_bind_expr.  */
      if (gimplify_ctxp->in_cleanup_point_expr
	  && needs_to_live_in_memory (temp))
	{
	  if (flag_stack_reuse == SR_ALL)
	    {
	      /* An empty volatile CONSTRUCTOR assignment is GIMPLE's
		 end-of-life clobber marker.  */
	      tree clobber = build_constructor (TREE_TYPE (temp),
						NULL);
	      TREE_THIS_VOLATILE (clobber) = true;
	      clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
	      gimple_push_cleanup (temp, clobber, false, pre_p, true);
	    }
	  if (asan_poisoned_variables
	      && DECL_ALIGN (temp) <= MAX_SUPPORTED_STACK_ALIGNMENT
	      && !TREE_STATIC (temp)
	      && dbg_cnt (asan_use_after_scope)
	      && !gimplify_omp_ctxp)
	    {
	      tree asan_cleanup = build_asan_poison_call_expr (temp);
	      if (asan_cleanup)
		{
		  /* Unpoison at the spot recorded before gimplifying the
		     initializer, so the slot is valid while INIT runs.  */
		  if (unpoison_empty_seq)
		    unpoison_it = gsi_start (*pre_p);

		  asan_poison_variable (temp, false, &unpoison_it,
					unpoison_empty_seq);
		  gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
		}
	    }
	}
      if (cleanup)
	gimple_push_cleanup (temp, cleanup, false, pre_p);

      /* Only expand this once.  */
      TREE_OPERAND (targ, 3) = init;
      TARGET_EXPR_INITIAL (targ) = NULL_TREE;
    }
  else
    /* We should have expanded this before.  */
    gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));

  /* The TARGET_EXPR evaluates to its slot.  */
  *expr_p = temp;
  return GS_OK;
}
6694 
6695 /* Gimplification of expression trees.  */
6696 
6697 /* Gimplify an expression which appears at statement context.  The
6698    corresponding GIMPLE statements are added to *SEQ_P.  If *SEQ_P is
6699    NULL, a new sequence is allocated.
6700 
6701    Return true if we actually added a statement to the queue.  */
6702 
6703 bool
6704 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
6705 {
6706   gimple_seq_node last;
6707 
6708   last = gimple_seq_last (*seq_p);
6709   gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
6710   return last != gimple_seq_last (*seq_p);
6711 }
6712 
6713 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
6714    to CTX.  If entries already exist, force them to be some flavor of private.
6715    If there is no enclosing parallel, do nothing.  */
6716 
6717 void
6718 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
6719 {
6720   splay_tree_node n;
6721 
6722   if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
6723     return;
6724 
6725   do
6726     {
6727       n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6728       if (n != NULL)
6729 	{
6730 	  if (n->value & GOVD_SHARED)
6731 	    n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
6732 	  else if (n->value & GOVD_MAP)
6733 	    n->value |= GOVD_MAP_TO_ONLY;
6734 	  else
6735 	    return;
6736 	}
6737       else if ((ctx->region_type & ORT_TARGET) != 0)
6738 	{
6739 	  if (ctx->target_map_scalars_firstprivate)
6740 	    omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6741 	  else
6742 	    omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
6743 	}
6744       else if (ctx->region_type != ORT_WORKSHARE
6745 	       && ctx->region_type != ORT_SIMD
6746 	       && ctx->region_type != ORT_ACC
6747 	       && !(ctx->region_type & ORT_TARGET_DATA))
6748 	omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6749 
6750       ctx = ctx->outer_context;
6751     }
6752   while (ctx);
6753 }
6754 
6755 /* Similarly for each of the type sizes of TYPE.  */
6756 
6757 static void
6758 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
6759 {
6760   if (type == NULL || type == error_mark_node)
6761     return;
6762   type = TYPE_MAIN_VARIANT (type);
6763 
6764   if (ctx->privatized_types->add (type))
6765     return;
6766 
6767   switch (TREE_CODE (type))
6768     {
6769     case INTEGER_TYPE:
6770     case ENUMERAL_TYPE:
6771     case BOOLEAN_TYPE:
6772     case REAL_TYPE:
6773     case FIXED_POINT_TYPE:
6774       omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
6775       omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
6776       break;
6777 
6778     case ARRAY_TYPE:
6779       omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6780       omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
6781       break;
6782 
6783     case RECORD_TYPE:
6784     case UNION_TYPE:
6785     case QUAL_UNION_TYPE:
6786       {
6787 	tree field;
6788 	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
6789 	  if (TREE_CODE (field) == FIELD_DECL)
6790 	    {
6791 	      omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
6792 	      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
6793 	    }
6794       }
6795       break;
6796 
6797     case POINTER_TYPE:
6798     case REFERENCE_TYPE:
6799       omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6800       break;
6801 
6802     default:
6803       break;
6804     }
6805 
6806   omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
6807   omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
6808   lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
6809 }
6810 
6811 /* Add an entry for DECL in the OMP context CTX with FLAGS.  */
6812 
static void
omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
{
  splay_tree_node n;
  unsigned int nflags;
  tree t;

  if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
    return;

  /* Never elide decls whose type has TREE_ADDRESSABLE set.  This means
     there are constructors involved somewhere.  Exception is a shared clause,
     there is nothing privatized in that case.  */
  if ((flags & GOVD_SHARED) == 0
      && (TREE_ADDRESSABLE (TREE_TYPE (decl))
	  || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
    flags |= GOVD_SEEN;

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
    {
      /* We shouldn't be re-adding the decl with the same data
	 sharing class.  */
      gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
      nflags = n->value | flags;
      /* The only combination of data sharing classes we should see is
	 FIRSTPRIVATE and LASTPRIVATE.  However, OpenACC permits
	 reduction variables to be used in data sharing clauses.  */
      gcc_assert ((ctx->region_type & ORT_ACC) != 0
		  || ((nflags & GOVD_DATA_SHARE_CLASS)
		      == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
		  || (flags & GOVD_DATA_SHARE_CLASS) == 0);
      n->value = nflags;
      return;
    }

  /* When adding a variable-sized variable, we have to handle all sorts
     of additional bits of data: the pointer replacement variable, and
     the parameters of the type.  */
  if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
    {
      /* Add the pointer replacement variable as PRIVATE if the variable
	 replacement is private, else FIRSTPRIVATE since we'll need the
	 address of the original variable either for SHARED, or for the
	 copy into or out of the context.  */
      if (!(flags & GOVD_LOCAL))
	{
	  if (flags & GOVD_MAP)
	    nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
	  else if (flags & GOVD_PRIVATE)
	    nflags = GOVD_PRIVATE;
	  else if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
		   && (flags & GOVD_FIRSTPRIVATE))
	    nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
	  else
	    nflags = GOVD_FIRSTPRIVATE;
	  nflags |= flags & GOVD_SEEN;
	  /* VLA decls carry a DECL_VALUE_EXPR of the form *ptr; recurse
	     on the underlying pointer variable.  */
	  t = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (t) == INDIRECT_REF);
	  t = TREE_OPERAND (t, 0);
	  gcc_assert (DECL_P (t));
	  omp_add_variable (ctx, t, nflags);
	}

      /* Add all of the variable and type parameters (which should have
	 been gimplified to a formal temporary) as FIRSTPRIVATE.  */
      omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
      omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* The variable-sized variable itself is never SHARED, only some form
	 of PRIVATE.  The sharing would take place via the pointer variable
	 which we remapped above.  */
      if (flags & GOVD_SHARED)
	flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
		| (flags & (GOVD_SEEN | GOVD_EXPLICIT));

      /* We're going to make use of the TYPE_SIZE_UNIT at least in the
	 alloca statement we generate for the variable, so make sure it
	 is available.  This isn't automatically needed for the SHARED
	 case, since we won't be allocating local storage then.
	 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
	 in this case omp_notice_variable will be called later
	 on when it is gimplified.  */
      else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
	       && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
	omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
    }
  else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
	   && lang_hooks.decls.omp_privatize_by_reference (decl))
    {
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* Similar to the direct variable sized case above, we'll need the
	 size of references being privatized.  */
      if ((flags & GOVD_SHARED) == 0)
	{
	  t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
	  if (DECL_P (t))
	    omp_notice_variable (ctx, t, true);
	}
    }

  /* Record (or augment) the final flags for DECL in this context.  */
  if (n != NULL)
    n->value |= flags;
  else
    splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);

  /* For reductions clauses in OpenACC loop directives, by default create a
     copy clause on the enclosing parallel construct for carrying back the
     results.  */
  if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
    {
      struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
      while (outer_ctx)
	{
	  n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
	  if (n != NULL)
	    {
	      /* Ignore local variables and explicitly declared clauses.  */
	      if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
		break;
	      else if (outer_ctx->region_type == ORT_ACC_KERNELS)
		{
		  /* According to the OpenACC spec, such a reduction variable
		     should already have a copy map on a kernels construct,
		     verify that here.  */
		  gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
			      && (n->value & GOVD_MAP));
		}
	      else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
		{
		  /* Remove firstprivate and make it a copy map.  */
		  n->value &= ~GOVD_FIRSTPRIVATE;
		  n->value |= GOVD_MAP;
		}
	    }
	  else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
	    {
	      /* No entry yet on the enclosing parallel: create the
		 implicit copy map and stop the walk.  */
	      splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
				 GOVD_MAP | GOVD_SEEN);
	      break;
	    }
	  outer_ctx = outer_ctx->outer_context;
	}
    }
}
6960 
6961 /* Notice a threadprivate variable DECL used in OMP context CTX.
6962    This just prints out diagnostics about threadprivate variable uses
6963    in untied tasks.  If DECL2 is non-NULL, prevent this warning
6964    on that variable.  */
6965 
6966 static bool
6967 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
6968 				   tree decl2)
6969 {
6970   splay_tree_node n;
6971   struct gimplify_omp_ctx *octx;
6972 
6973   for (octx = ctx; octx; octx = octx->outer_context)
6974     if ((octx->region_type & ORT_TARGET) != 0)
6975       {
6976 	n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
6977 	if (n == NULL)
6978 	  {
6979 	    error ("threadprivate variable %qE used in target region",
6980 		   DECL_NAME (decl));
6981 	    error_at (octx->location, "enclosing target region");
6982 	    splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
6983 	  }
6984 	if (decl2)
6985 	  splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
6986       }
6987 
6988   if (ctx->region_type != ORT_UNTIED_TASK)
6989     return false;
6990   n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6991   if (n == NULL)
6992     {
6993       error ("threadprivate variable %qE used in untied task",
6994 	     DECL_NAME (decl));
6995       error_at (ctx->location, "enclosing task");
6996       splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
6997     }
6998   if (decl2)
6999     splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
7000   return false;
7001 }
7002 
7003 /* Return true if global var DECL is device resident.  */
7004 
7005 static bool
7006 device_resident_p (tree decl)
7007 {
7008   tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
7009 
7010   if (!attr)
7011     return false;
7012 
7013   for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
7014     {
7015       tree c = TREE_VALUE (t);
7016       if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
7017 	return true;
7018     }
7019 
7020   return false;
7021 }
7022 
7023 /* Return true if DECL has an ACC DECLARE attribute.  */
7024 
7025 static bool
7026 is_oacc_declared (tree decl)
7027 {
7028   tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
7029   tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
7030   return declared != NULL_TREE;
7031 }
7032 
7033 /* Determine outer default flags for DECL mentioned in an OMP region
7034    but not declared in an enclosing clause.
7035 
7036    ??? Some compiler-generated variables (like SAVE_EXPRs) could be
7037    remapped firstprivate instead of shared.  To some extent this is
7038    addressed in omp_firstprivatize_type_sizes, but not
7039    effectively.  */
7040 
static unsigned
omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
		    bool in_code, unsigned flags)
{
  enum omp_clause_default_kind default_kind = ctx->default_kind;
  enum omp_clause_default_kind kind;

  /* A front-end-predetermined sharing kind overrides the region's
     default clause.  */
  kind = lang_hooks.decls.omp_predetermined_sharing (decl);
  if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    default_kind = kind;

  switch (default_kind)
    {
    case OMP_CLAUSE_DEFAULT_NONE:
      {
	const char *rtype;

	if (ctx->region_type & ORT_PARALLEL)
	  rtype = "parallel";
	else if (ctx->region_type & ORT_TASK)
	  rtype = "task";
	else if (ctx->region_type & ORT_TEAMS)
	  rtype = "teams";
	else
	  gcc_unreachable ();

	error ("%qE not specified in enclosing %qs",
	       DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
	error_at (ctx->location, "enclosing %qs", rtype);
      }
      /* After diagnosing, fall through and treat it as shared so
	 compilation can continue.  */
      /* FALLTHRU */
    case OMP_CLAUSE_DEFAULT_SHARED:
      flags |= GOVD_SHARED;
      break;
    case OMP_CLAUSE_DEFAULT_PRIVATE:
      flags |= GOVD_PRIVATE;
      break;
    case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
      flags |= GOVD_FIRSTPRIVATE;
      break;
    case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
      /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED.  */
      gcc_assert ((ctx->region_type & ORT_TASK) != 0);
      /* Search the enclosing contexts: a non-shared data-sharing class
	 in an outer context makes DECL firstprivate here; an enclosing
	 parallel/teams region makes it shared.  Target (data) regions
	 without a sharing class for DECL are skipped.  */
      if (struct gimplify_omp_ctx *octx = ctx->outer_context)
	{
	  omp_notice_variable (octx, decl, in_code);
	  for (; octx; octx = octx->outer_context)
	    {
	      splay_tree_node n2;

	      n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
		  && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
		continue;
	      if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
		{
		  flags |= GOVD_FIRSTPRIVATE;
		  goto found_outer;
		}
	      if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
		{
		  flags |= GOVD_SHARED;
		  goto found_outer;
		}
	    }
	}

      /* No verdict from the enclosing contexts: parameters and function
	 locals default to firstprivate, globals to shared.  */
      if (TREE_CODE (decl) == PARM_DECL
	  || (!is_global_var (decl)
	      && DECL_CONTEXT (decl) == current_function_decl))
	flags |= GOVD_FIRSTPRIVATE;
      else
	flags |= GOVD_SHARED;
    found_outer:
      break;

    default:
      gcc_unreachable ();
    }

  return flags;
}
7123 
7124 
7125 /* Determine outer default flags for DECL mentioned in an OACC region
7126    but not declared in an enclosing clause.  */
7127 
static unsigned
oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
{
  const char *rkind;
  bool on_device = false;
  bool declared = is_oacc_declared (decl);
  tree type = TREE_TYPE (decl);

  /* For by-reference privatization, classify based on the referenced
     type, not the reference itself.  */
  if (lang_hooks.decls.omp_privatize_by_reference (decl))
    type = TREE_TYPE (type);

  /* Globals already resident on the device only need a to-only map.  */
  if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
      && is_global_var (decl)
      && device_resident_p (decl))
    {
      on_device = true;
      flags |= GOVD_MAP_TO_ONLY;
    }

  switch (ctx->region_type)
    {
    case ORT_ACC_KERNELS:
      rkind = "kernels";

      if (AGGREGATE_TYPE_P (type))
	{
	  /* Aggregates default to 'present_or_copy', or 'present'.  */
	  if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
	    flags |= GOVD_MAP;
	  else
	    flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
	}
      else
	/* Scalars default to 'copy'.  */
	flags |= GOVD_MAP | GOVD_MAP_FORCE;

      break;

    case ORT_ACC_PARALLEL:
      rkind = "parallel";

      /* Device-resident or 'declare'd variables are mapped rather than
	 firstprivatized.  */
      if (on_device || declared)
	flags |= GOVD_MAP;
      else if (AGGREGATE_TYPE_P (type))
	{
	  /* Aggregates default to 'present_or_copy', or 'present'.  */
	  if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
	    flags |= GOVD_MAP;
	  else
	    flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
	}
      else
	/* Scalars default to 'firstprivate'.  */
	flags |= GOVD_FIRSTPRIVATE;

      break;

    default:
      gcc_unreachable ();
    }

  /* Diagnose default(none) violations; the flags chosen above still
     stand so compilation can continue.  */
  if (DECL_ARTIFICIAL (decl))
    ; /* We can get compiler-generated decls, and should not complain
	 about them.  */
  else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
    {
      error ("%qE not specified in enclosing OpenACC %qs construct",
	     DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
      inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
    }
  else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
    ; /* Handled above.  */
  else
    gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);

  return flags;
}
7205 
/* Record the fact that DECL was used within the OMP context CTX.
   IN_CODE is true when real code uses DECL, and false when we should
   merely emit default(none) errors.  Return true if DECL is going to
   be remapped and thus DECL shouldn't be gimplified into its
   DECL_VALUE_EXPR (if any).  */

static bool
omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
{
  splay_tree_node n;
  /* Real uses get flagged GOVD_SEEN; default(none) checks add no flag.  */
  unsigned flags = in_code ? GOVD_SEEN : 0;
  bool ret = false, shared;

  if (error_operand_p (decl))
    return false;

  /* Outside any OMP region, only ask the front end whether DECL's
     DECL_VALUE_EXPR should be disregarded.  */
  if (ctx->region_type == ORT_NONE)
    return lang_hooks.decls.omp_disregard_value_expr (decl, false);

  if (is_global_var (decl))
    {
      /* Threadprivate variables are predetermined.  */
      if (DECL_THREAD_LOCAL_P (decl))
	return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);

      /* A global whose value expression is based on a thread-local
	 decl is handled the same way.  */
      if (DECL_HAS_VALUE_EXPR_P (decl))
	{
	  tree value = get_base_address (DECL_VALUE_EXPR (decl));

	  if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
	    return omp_notice_threadprivate_variable (ctx, decl, value);
	}

      /* In an OpenACC "routine" function (no outer gimplify context),
	 a global variable must carry an "omp declare target" attribute
	 and must not carry "omp declare target link".  */
      if (gimplify_omp_ctxp->outer_context == NULL
	  && VAR_P (decl)
	  && oacc_get_fn_attrib (current_function_decl))
	{
	  location_t loc = DECL_SOURCE_LOCATION (decl);

	  if (lookup_attribute ("omp declare target link",
				DECL_ATTRIBUTES (decl)))
	    {
	      error_at (loc,
			"%qE with %<link%> clause used in %<routine%> function",
			DECL_NAME (decl));
	      return false;
	    }
	  else if (!lookup_attribute ("omp declare target",
				      DECL_ATTRIBUTES (decl)))
	    {
	      error_at (loc,
			"%qE requires a %<declare%> directive for use "
			"in a %<routine%> function", DECL_NAME (decl));
	      return false;
	    }
	}
    }

  /* Look up any data-sharing attributes already recorded for DECL in
     this context.  */
  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if ((ctx->region_type & ORT_TARGET) != 0)
    {
      ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
      if (n == NULL)
	{
	  /* DECL not yet known in this target region; compute its
	     implicit mapping attribute in NFLAGS.  While NFLAGS equals
	     FLAGS, no attribute has been determined yet.  */
	  unsigned nflags = flags;
	  if (ctx->target_map_pointers_as_0len_arrays
	      || ctx->target_map_scalars_firstprivate)
	    {
	      bool is_declare_target = false;
	      bool is_scalar = false;
	      /* An offloadable global with no explicit non-shared
		 data-sharing class in any enclosing context counts as
		 "declare target" and needs no mapping here.  */
	      if (is_global_var (decl)
		  && varpool_node::get_create (decl)->offloadable)
		{
		  struct gimplify_omp_ctx *octx;
		  for (octx = ctx->outer_context;
		       octx; octx = octx->outer_context)
		    {
		      n = splay_tree_lookup (octx->variables,
					     (splay_tree_key)decl);
		      if (n
			  && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
			  && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
			break;
		    }
		  is_declare_target = octx == NULL;
		}
	      if (!is_declare_target && ctx->target_map_scalars_firstprivate)
		is_scalar = lang_hooks.decls.omp_scalar_p (decl);
	      if (is_declare_target)
		;
	      /* Pointers (and references to pointers) map as
		 zero-length array sections.  */
	      else if (ctx->target_map_pointers_as_0len_arrays
		       && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
			   || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
			       && TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
				  == POINTER_TYPE)))
		nflags |= GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
	      else if (is_scalar)
		nflags |= GOVD_FIRSTPRIVATE;
	    }

	  struct gimplify_omp_ctx *octx = ctx->outer_context;
	  if ((ctx->region_type & ORT_ACC) && octx)
	    {
	      /* Look in outer OpenACC contexts, to see if there's a
		 data attribute for this variable.  */
	      omp_notice_variable (octx, decl, in_code);

	      for (; octx; octx = octx->outer_context)
		{
		  if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
		    break;
		  splay_tree_node n2
		    = splay_tree_lookup (octx->variables,
					 (splay_tree_key) decl);
		  if (n2)
		    {
		      if (octx->region_type == ORT_ACC_HOST_DATA)
		        error ("variable %qE declared in enclosing "
			       "%<host_data%> region", DECL_NAME (decl));
		      nflags |= GOVD_MAP;
		      if (octx->region_type == ORT_ACC_DATA
			  && (n2->value & GOVD_MAP_0LEN_ARRAY))
			nflags |= GOVD_MAP_0LEN_ARRAY;
		      /* An outer data attribute was found; skip the
			 mappability check and default handling.  */
		      goto found_outer;
		    }
		}
	    }

	  {
	    tree type = TREE_TYPE (decl);

	    /* When privatizing by reference, check mappability of the
	       referenced type rather than the reference itself.  */
	    if (nflags == flags
		&& gimplify_omp_ctxp->target_firstprivatize_array_bases
		&& lang_hooks.decls.omp_privatize_by_reference (decl))
	      type = TREE_TYPE (type);
	    if (nflags == flags
		&& !lang_hooks.types.omp_mappable_type (type))
	      {
		error ("%qD referenced in target region does not have "
		       "a mappable type", decl);
		nflags |= GOVD_MAP | GOVD_EXPLICIT;
	      }
	    else if (nflags == flags)
	      {
		/* Still no attribute determined: OpenACC applies its
		   default clause rules, OpenMP maps the variable.  */
		if ((ctx->region_type & ORT_ACC) != 0)
		  nflags = oacc_default_clause (ctx, decl, flags);
		else
		  nflags |= GOVD_MAP;
	      }
	  }
	found_outer:
	  omp_add_variable (ctx, decl, nflags);
	}
      else
	{
	  /* If nothing changed, there's nothing left to do.  */
	  if ((n->value & flags) == flags)
	    return ret;
	  flags |= n->value;
	  n->value = flags;
	}
      goto do_outer;
    }

  if (n == NULL)
    {
      /* These region types do not determine a sharing attribute here;
	 propagate the notice to an outer context instead.  */
      if (ctx->region_type == ORT_WORKSHARE
	  || ctx->region_type == ORT_SIMD
	  || ctx->region_type == ORT_ACC
	  || (ctx->region_type & ORT_TARGET_DATA) != 0)
	goto do_outer;

      flags = omp_default_clause (ctx, decl, in_code, flags);

      if ((flags & GOVD_PRIVATE)
	  && lang_hooks.decls.omp_private_outer_ref (decl))
	flags |= GOVD_PRIVATE_OUTER_REF;

      omp_add_variable (ctx, decl, flags);

      shared = (flags & GOVD_SHARED) != 0;
      ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
      goto do_outer;
    }

  /* First real (non-local) use of DECL: also notice decls its size
     depends on, so they are marked seen as well.  */
  if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
      && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
      && DECL_SIZE (decl))
    {
      if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	{
	  /* For a variable-sized decl, DECL_VALUE_EXPR is an
	     INDIRECT_REF of another decl; mark that decl seen.  */
	  splay_tree_node n2;
	  tree t = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (t) == INDIRECT_REF);
	  t = TREE_OPERAND (t, 0);
	  gcc_assert (DECL_P (t));
	  n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
	  n2->value |= GOVD_SEEN;
	}
      else if (lang_hooks.decls.omp_privatize_by_reference (decl)
	       && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
	       && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
		   != INTEGER_CST))
	{
	  /* Likewise notice a decl appearing in the non-constant size
	     of the referenced type, if tracked in this context.  */
	  splay_tree_node n2;
	  tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
	  gcc_assert (DECL_P (t));
	  n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
	  if (n2)
	    omp_notice_variable (ctx, t, true);
	}
    }

  shared = ((flags | n->value) & GOVD_SHARED) != 0;
  ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);

  /* If nothing changed, there's nothing left to do.  */
  if ((n->value & flags) == flags)
    return ret;
  flags |= n->value;
  n->value = flags;

 do_outer:
  /* If the variable is private in the current context, then we don't
     need to propagate anything to an outer context.  */
  if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
    return ret;
  if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
      == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
    return ret;
  if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
		| GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
      == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
    return ret;
  /* Otherwise notice DECL in the enclosing context as well.  */
  if (ctx->outer_context
      && omp_notice_variable (ctx->outer_context, decl, in_code))
    return true;
  return ret;
}
7445 
7446 /* Verify that DECL is private within CTX.  If there's specific information
7447    to the contrary in the innermost scope, generate an error.  */
7448 
7449 static bool
7450 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
7451 {
7452   splay_tree_node n;
7453 
7454   n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7455   if (n != NULL)
7456     {
7457       if (n->value & GOVD_SHARED)
7458 	{
7459 	  if (ctx == gimplify_omp_ctxp)
7460 	    {
7461 	      if (simd)
7462 		error ("iteration variable %qE is predetermined linear",
7463 		       DECL_NAME (decl));
7464 	      else
7465 		error ("iteration variable %qE should be private",
7466 		       DECL_NAME (decl));
7467 	      n->value = GOVD_PRIVATE;
7468 	      return true;
7469 	    }
7470 	  else
7471 	    return false;
7472 	}
7473       else if ((n->value & GOVD_EXPLICIT) != 0
7474 	       && (ctx == gimplify_omp_ctxp
7475 		   || (ctx->region_type == ORT_COMBINED_PARALLEL
7476 		       && gimplify_omp_ctxp->outer_context == ctx)))
7477 	{
7478 	  if ((n->value & GOVD_FIRSTPRIVATE) != 0)
7479 	    error ("iteration variable %qE should not be firstprivate",
7480 		   DECL_NAME (decl));
7481 	  else if ((n->value & GOVD_REDUCTION) != 0)
7482 	    error ("iteration variable %qE should not be reduction",
7483 		   DECL_NAME (decl));
7484 	  else if (simd == 0 && (n->value & GOVD_LINEAR) != 0)
7485 	    error ("iteration variable %qE should not be linear",
7486 		   DECL_NAME (decl));
7487 	  else if (simd == 1 && (n->value & GOVD_LASTPRIVATE) != 0)
7488 	    error ("iteration variable %qE should not be lastprivate",
7489 		   DECL_NAME (decl));
7490 	  else if (simd && (n->value & GOVD_PRIVATE) != 0)
7491 	    error ("iteration variable %qE should not be private",
7492 		   DECL_NAME (decl));
7493 	  else if (simd == 2 && (n->value & GOVD_LINEAR) != 0)
7494 	    error ("iteration variable %qE is predetermined linear",
7495 		   DECL_NAME (decl));
7496 	}
7497       return (ctx == gimplify_omp_ctxp
7498 	      || (ctx->region_type == ORT_COMBINED_PARALLEL
7499 		  && gimplify_omp_ctxp->outer_context == ctx));
7500     }
7501 
7502   if (ctx->region_type != ORT_WORKSHARE
7503       && ctx->region_type != ORT_SIMD
7504       && ctx->region_type != ORT_ACC)
7505     return false;
7506   else if (ctx->outer_context)
7507     return omp_is_private (ctx->outer_context, decl, simd);
7508   return false;
7509 }
7510 
7511 /* Return true if DECL is private within a parallel region
7512    that binds to the current construct's context or in parallel
7513    region's REDUCTION clause.  */
7514 
7515 static bool
7516 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
7517 {
7518   splay_tree_node n;
7519 
7520   do
7521     {
7522       ctx = ctx->outer_context;
7523       if (ctx == NULL)
7524 	{
7525 	  if (is_global_var (decl))
7526 	    return false;
7527 
7528 	  /* References might be private, but might be shared too,
7529 	     when checking for copyprivate, assume they might be
7530 	     private, otherwise assume they might be shared.  */
7531 	  if (copyprivate)
7532 	    return true;
7533 
7534 	  if (lang_hooks.decls.omp_privatize_by_reference (decl))
7535 	    return false;
7536 
7537 	  /* Treat C++ privatized non-static data members outside
7538 	     of the privatization the same.  */
7539 	  if (omp_member_access_dummy_var (decl))
7540 	    return false;
7541 
7542 	  return true;
7543 	}
7544 
7545       n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
7546 
7547       if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
7548 	  && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
7549 	continue;
7550 
7551       if (n != NULL)
7552 	{
7553 	  if ((n->value & GOVD_LOCAL) != 0
7554 	      && omp_member_access_dummy_var (decl))
7555 	    return false;
7556 	  return (n->value & GOVD_SHARED) == 0;
7557 	}
7558     }
7559   while (ctx->region_type == ORT_WORKSHARE
7560 	 || ctx->region_type == ORT_SIMD
7561 	 || ctx->region_type == ORT_ACC);
7562   return false;
7563 }
7564 
7565 /* Callback for walk_tree to find a DECL_EXPR for the given DECL.  */
7566 
7567 static tree
7568 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
7569 {
7570   tree t = *tp;
7571 
7572   /* If this node has been visited, unmark it and keep looking.  */
7573   if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
7574     return t;
7575 
7576   if (IS_TYPE_OR_DECL_P (t))
7577     *walk_subtrees = 0;
7578   return NULL_TREE;
7579 }
7580 
7581 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
7582    and previous omp contexts.  */
7583 
7584 static void
7585 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
7586 			   enum omp_region_type region_type,
7587 			   enum tree_code code)
7588 {
7589   struct gimplify_omp_ctx *ctx, *outer_ctx;
7590   tree c;
7591   hash_map<tree, tree> *struct_map_to_clause = NULL;
7592   tree *prev_list_p = NULL;
7593 
7594   ctx = new_omp_context (region_type);
7595   outer_ctx = ctx->outer_context;
7596   if (code == OMP_TARGET)
7597     {
7598       if (!lang_GNU_Fortran ())
7599 	ctx->target_map_pointers_as_0len_arrays = true;
7600       ctx->target_map_scalars_firstprivate = true;
7601     }
7602   if (!lang_GNU_Fortran ())
7603     switch (code)
7604       {
7605       case OMP_TARGET:
7606       case OMP_TARGET_DATA:
7607       case OMP_TARGET_ENTER_DATA:
7608       case OMP_TARGET_EXIT_DATA:
7609       case OACC_DECLARE:
7610       case OACC_HOST_DATA:
7611 	ctx->target_firstprivatize_array_bases = true;
7612       default:
7613 	break;
7614       }
7615 
7616   while ((c = *list_p) != NULL)
7617     {
7618       bool remove = false;
7619       bool notice_outer = true;
7620       const char *check_non_private = NULL;
7621       unsigned int flags;
7622       tree decl;
7623 
7624       switch (OMP_CLAUSE_CODE (c))
7625 	{
7626 	case OMP_CLAUSE_PRIVATE:
7627 	  flags = GOVD_PRIVATE | GOVD_EXPLICIT;
7628 	  if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
7629 	    {
7630 	      flags |= GOVD_PRIVATE_OUTER_REF;
7631 	      OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
7632 	    }
7633 	  else
7634 	    notice_outer = false;
7635 	  goto do_add;
7636 	case OMP_CLAUSE_SHARED:
7637 	  flags = GOVD_SHARED | GOVD_EXPLICIT;
7638 	  goto do_add;
7639 	case OMP_CLAUSE_FIRSTPRIVATE:
7640 	  flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
7641 	  check_non_private = "firstprivate";
7642 	  goto do_add;
7643 	case OMP_CLAUSE_LASTPRIVATE:
7644 	  flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
7645 	  check_non_private = "lastprivate";
7646 	  decl = OMP_CLAUSE_DECL (c);
7647 	  if (error_operand_p (decl))
7648 	    goto do_add;
7649 	  else if (outer_ctx
7650 		   && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
7651 		       || outer_ctx->region_type == ORT_COMBINED_TEAMS)
7652 		   && splay_tree_lookup (outer_ctx->variables,
7653 					 (splay_tree_key) decl) == NULL)
7654 	    {
7655 	      omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN);
7656 	      if (outer_ctx->outer_context)
7657 		omp_notice_variable (outer_ctx->outer_context, decl, true);
7658 	    }
7659 	  else if (outer_ctx
7660 		   && (outer_ctx->region_type & ORT_TASK) != 0
7661 		   && outer_ctx->combined_loop
7662 		   && splay_tree_lookup (outer_ctx->variables,
7663 					 (splay_tree_key) decl) == NULL)
7664 	    {
7665 	      omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
7666 	      if (outer_ctx->outer_context)
7667 		omp_notice_variable (outer_ctx->outer_context, decl, true);
7668 	    }
7669 	  else if (outer_ctx
7670 		   && (outer_ctx->region_type == ORT_WORKSHARE
7671 		       || outer_ctx->region_type == ORT_ACC)
7672 		   && outer_ctx->combined_loop
7673 		   && splay_tree_lookup (outer_ctx->variables,
7674 					 (splay_tree_key) decl) == NULL
7675 		   && !omp_check_private (outer_ctx, decl, false))
7676 	    {
7677 	      omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
7678 	      if (outer_ctx->outer_context
7679 		  && (outer_ctx->outer_context->region_type
7680 		      == ORT_COMBINED_PARALLEL)
7681 		  && splay_tree_lookup (outer_ctx->outer_context->variables,
7682 					(splay_tree_key) decl) == NULL)
7683 		{
7684 		  struct gimplify_omp_ctx *octx = outer_ctx->outer_context;
7685 		  omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
7686 		  if (octx->outer_context)
7687 		    {
7688 		      octx = octx->outer_context;
7689 		      if (octx->region_type == ORT_WORKSHARE
7690 			  && octx->combined_loop
7691 			  && splay_tree_lookup (octx->variables,
7692 						(splay_tree_key) decl) == NULL
7693 			  && !omp_check_private (octx, decl, false))
7694 			{
7695 			  omp_add_variable (octx, decl,
7696 					    GOVD_LASTPRIVATE | GOVD_SEEN);
7697 			  octx = octx->outer_context;
7698 			  if (octx
7699 			      && octx->region_type == ORT_COMBINED_TEAMS
7700 			      && (splay_tree_lookup (octx->variables,
7701 						     (splay_tree_key) decl)
7702 				  == NULL))
7703 			    {
7704 			      omp_add_variable (octx, decl,
7705 						GOVD_SHARED | GOVD_SEEN);
7706 			      octx = octx->outer_context;
7707 			    }
7708 			}
7709 		      if (octx)
7710 			omp_notice_variable (octx, decl, true);
7711 		    }
7712 		}
7713 	      else if (outer_ctx->outer_context)
7714 		omp_notice_variable (outer_ctx->outer_context, decl, true);
7715 	    }
7716 	  goto do_add;
7717 	case OMP_CLAUSE_REDUCTION:
7718 	  flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
7719 	  /* OpenACC permits reductions on private variables.  */
7720 	  if (!(region_type & ORT_ACC))
7721 	    check_non_private = "reduction";
7722 	  decl = OMP_CLAUSE_DECL (c);
7723 	  if (TREE_CODE (decl) == MEM_REF)
7724 	    {
7725 	      tree type = TREE_TYPE (decl);
7726 	      if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
7727 				 NULL, is_gimple_val, fb_rvalue, false)
7728 		  == GS_ERROR)
7729 		{
7730 		  remove = true;
7731 		  break;
7732 		}
7733 	      tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7734 	      if (DECL_P (v))
7735 		{
7736 		  omp_firstprivatize_variable (ctx, v);
7737 		  omp_notice_variable (ctx, v, true);
7738 		}
7739 	      decl = TREE_OPERAND (decl, 0);
7740 	      if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
7741 		{
7742 		  if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
7743 				     NULL, is_gimple_val, fb_rvalue, false)
7744 		      == GS_ERROR)
7745 		    {
7746 		      remove = true;
7747 		      break;
7748 		    }
7749 		  v = TREE_OPERAND (decl, 1);
7750 		  if (DECL_P (v))
7751 		    {
7752 		      omp_firstprivatize_variable (ctx, v);
7753 		      omp_notice_variable (ctx, v, true);
7754 		    }
7755 		  decl = TREE_OPERAND (decl, 0);
7756 		}
7757 	      if (TREE_CODE (decl) == ADDR_EXPR
7758 		  || TREE_CODE (decl) == INDIRECT_REF)
7759 		decl = TREE_OPERAND (decl, 0);
7760 	    }
7761 	  goto do_add_decl;
7762 	case OMP_CLAUSE_LINEAR:
7763 	  if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
7764 			     is_gimple_val, fb_rvalue) == GS_ERROR)
7765 	    {
7766 	      remove = true;
7767 	      break;
7768 	    }
7769 	  else
7770 	    {
7771 	      if (code == OMP_SIMD
7772 		  && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
7773 		{
7774 		  struct gimplify_omp_ctx *octx = outer_ctx;
7775 		  if (octx
7776 		      && octx->region_type == ORT_WORKSHARE
7777 		      && octx->combined_loop
7778 		      && !octx->distribute)
7779 		    {
7780 		      if (octx->outer_context
7781 			  && (octx->outer_context->region_type
7782 			      == ORT_COMBINED_PARALLEL))
7783 			octx = octx->outer_context->outer_context;
7784 		      else
7785 			octx = octx->outer_context;
7786 		    }
7787 		  if (octx
7788 		      && octx->region_type == ORT_WORKSHARE
7789 		      && octx->combined_loop
7790 		      && octx->distribute)
7791 		    {
7792 		      error_at (OMP_CLAUSE_LOCATION (c),
7793 				"%<linear%> clause for variable other than "
7794 				"loop iterator specified on construct "
7795 				"combined with %<distribute%>");
7796 		      remove = true;
7797 		      break;
7798 		    }
7799 		}
7800 	      /* For combined #pragma omp parallel for simd, need to put
7801 		 lastprivate and perhaps firstprivate too on the
7802 		 parallel.  Similarly for #pragma omp for simd.  */
7803 	      struct gimplify_omp_ctx *octx = outer_ctx;
7804 	      decl = NULL_TREE;
7805 	      do
7806 		{
7807 		  if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
7808 		      && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7809 		    break;
7810 		  decl = OMP_CLAUSE_DECL (c);
7811 		  if (error_operand_p (decl))
7812 		    {
7813 		      decl = NULL_TREE;
7814 		      break;
7815 		    }
7816 		  flags = GOVD_SEEN;
7817 		  if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
7818 		    flags |= GOVD_FIRSTPRIVATE;
7819 		  if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7820 		    flags |= GOVD_LASTPRIVATE;
7821 		  if (octx
7822 		      && octx->region_type == ORT_WORKSHARE
7823 		      && octx->combined_loop)
7824 		    {
7825 		      if (octx->outer_context
7826 			  && (octx->outer_context->region_type
7827 			      == ORT_COMBINED_PARALLEL))
7828 			octx = octx->outer_context;
7829 		      else if (omp_check_private (octx, decl, false))
7830 			break;
7831 		    }
7832 		  else if (octx
7833 			   && (octx->region_type & ORT_TASK) != 0
7834 			   && octx->combined_loop)
7835 		    ;
7836 		  else if (octx
7837 			   && octx->region_type == ORT_COMBINED_PARALLEL
7838 			   && ctx->region_type == ORT_WORKSHARE
7839 			   && octx == outer_ctx)
7840 		    flags = GOVD_SEEN | GOVD_SHARED;
7841 		  else if (octx
7842 			   && octx->region_type == ORT_COMBINED_TEAMS)
7843 		    flags = GOVD_SEEN | GOVD_SHARED;
7844 		  else if (octx
7845 			   && octx->region_type == ORT_COMBINED_TARGET)
7846 		    {
7847 		      flags &= ~GOVD_LASTPRIVATE;
7848 		      if (flags == GOVD_SEEN)
7849 			break;
7850 		    }
7851 		  else
7852 		    break;
7853 		  splay_tree_node on
7854 		    = splay_tree_lookup (octx->variables,
7855 					 (splay_tree_key) decl);
7856 		  if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
7857 		    {
7858 		      octx = NULL;
7859 		      break;
7860 		    }
7861 		  omp_add_variable (octx, decl, flags);
7862 		  if (octx->outer_context == NULL)
7863 		    break;
7864 		  octx = octx->outer_context;
7865 		}
7866 	      while (1);
7867 	      if (octx
7868 		  && decl
7869 		  && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
7870 		      || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
7871 		omp_notice_variable (octx, decl, true);
7872 	    }
7873 	  flags = GOVD_LINEAR | GOVD_EXPLICIT;
7874 	  if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
7875 	      && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7876 	    {
7877 	      notice_outer = false;
7878 	      flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
7879 	    }
7880 	  goto do_add;
7881 
7882 	case OMP_CLAUSE_MAP:
7883 	  decl = OMP_CLAUSE_DECL (c);
7884 	  if (error_operand_p (decl))
7885 	    remove = true;
7886 	  switch (code)
7887 	    {
7888 	    case OMP_TARGET:
7889 	      break;
7890 	    case OACC_DATA:
7891 	      if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
7892 		break;
7893 	      /* FALLTHRU */
7894 	    case OMP_TARGET_DATA:
7895 	    case OMP_TARGET_ENTER_DATA:
7896 	    case OMP_TARGET_EXIT_DATA:
7897 	    case OACC_ENTER_DATA:
7898 	    case OACC_EXIT_DATA:
7899 	    case OACC_HOST_DATA:
7900 	      if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7901 		  || (OMP_CLAUSE_MAP_KIND (c)
7902 		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7903 		/* For target {,enter ,exit }data only the array slice is
7904 		   mapped, but not the pointer to it.  */
7905 		remove = true;
7906 	      break;
7907 	    default:
7908 	      break;
7909 	    }
7910 	  if (remove)
7911 	    break;
7912 	  if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
7913 	    {
7914 	      struct gimplify_omp_ctx *octx;
7915 	      for (octx = outer_ctx; octx; octx = octx->outer_context)
7916 	        {
7917 		  if (octx->region_type != ORT_ACC_HOST_DATA)
7918 		    break;
7919 		  splay_tree_node n2
7920 		    = splay_tree_lookup (octx->variables,
7921 					 (splay_tree_key) decl);
7922 		  if (n2)
7923 		    error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
7924 			      "declared in enclosing %<host_data%> region",
7925 			      DECL_NAME (decl));
7926 		}
7927 	    }
7928 	  if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
7929 	    OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
7930 				  : TYPE_SIZE_UNIT (TREE_TYPE (decl));
7931 	  if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
7932 			     NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
7933 	    {
7934 	      remove = true;
7935 	      break;
7936 	    }
7937 	  else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7938 		    || (OMP_CLAUSE_MAP_KIND (c)
7939 			== GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7940 		   && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
7941 	    {
7942 	      OMP_CLAUSE_SIZE (c)
7943 		= get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
7944 					   false);
7945 	      omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
7946 				GOVD_FIRSTPRIVATE | GOVD_SEEN);
7947 	    }
7948 	  if (!DECL_P (decl))
7949 	    {
7950 	      tree d = decl, *pd;
7951 	      if (TREE_CODE (d) == ARRAY_REF)
7952 		{
7953 		  while (TREE_CODE (d) == ARRAY_REF)
7954 		    d = TREE_OPERAND (d, 0);
7955 		  if (TREE_CODE (d) == COMPONENT_REF
7956 		      && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
7957 		    decl = d;
7958 		}
7959 	      pd = &OMP_CLAUSE_DECL (c);
7960 	      if (d == decl
7961 		  && TREE_CODE (decl) == INDIRECT_REF
7962 		  && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
7963 		  && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
7964 		      == REFERENCE_TYPE))
7965 		{
7966 		  pd = &TREE_OPERAND (decl, 0);
7967 		  decl = TREE_OPERAND (decl, 0);
7968 		}
7969 	      if (TREE_CODE (decl) == COMPONENT_REF)
7970 		{
7971 		  while (TREE_CODE (decl) == COMPONENT_REF)
7972 		    decl = TREE_OPERAND (decl, 0);
7973 		  if (TREE_CODE (decl) == INDIRECT_REF
7974 		      && DECL_P (TREE_OPERAND (decl, 0))
7975 		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
7976 			  == REFERENCE_TYPE))
7977 		    decl = TREE_OPERAND (decl, 0);
7978 		}
7979 	      if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue, fb_lvalue)
7980 		  == GS_ERROR)
7981 		{
7982 		  remove = true;
7983 		  break;
7984 		}
7985 	      if (DECL_P (decl))
7986 		{
7987 		  if (error_operand_p (decl))
7988 		    {
7989 		      remove = true;
7990 		      break;
7991 		    }
7992 
7993 		  tree stype = TREE_TYPE (decl);
7994 		  if (TREE_CODE (stype) == REFERENCE_TYPE)
7995 		    stype = TREE_TYPE (stype);
7996 		  if (TYPE_SIZE_UNIT (stype) == NULL
7997 		      || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
7998 		    {
7999 		      error_at (OMP_CLAUSE_LOCATION (c),
8000 				"mapping field %qE of variable length "
8001 				"structure", OMP_CLAUSE_DECL (c));
8002 		      remove = true;
8003 		      break;
8004 		    }
8005 
8006 		  if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
8007 		    {
8008 		      /* Error recovery.  */
8009 		      if (prev_list_p == NULL)
8010 			{
8011 			  remove = true;
8012 			  break;
8013 			}
8014 		      if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
8015 			{
8016 			  tree ch = OMP_CLAUSE_CHAIN (*prev_list_p);
8017 			  if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c)
8018 			    {
8019 			      remove = true;
8020 			      break;
8021 			    }
8022 			}
8023 		    }
8024 
8025 		  tree offset;
8026 		  poly_int64 bitsize, bitpos;
8027 		  machine_mode mode;
8028 		  int unsignedp, reversep, volatilep = 0;
8029 		  tree base = OMP_CLAUSE_DECL (c);
8030 		  while (TREE_CODE (base) == ARRAY_REF)
8031 		    base = TREE_OPERAND (base, 0);
8032 		  if (TREE_CODE (base) == INDIRECT_REF)
8033 		    base = TREE_OPERAND (base, 0);
8034 		  base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8035 					      &mode, &unsignedp, &reversep,
8036 					      &volatilep);
8037 		  tree orig_base = base;
8038 		  if ((TREE_CODE (base) == INDIRECT_REF
8039 		       || (TREE_CODE (base) == MEM_REF
8040 			   && integer_zerop (TREE_OPERAND (base, 1))))
8041 		      && DECL_P (TREE_OPERAND (base, 0))
8042 		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
8043 			  == REFERENCE_TYPE))
8044 		    base = TREE_OPERAND (base, 0);
8045 		  gcc_assert (base == decl
8046 			      && (offset == NULL_TREE
8047 				  || poly_int_tree_p (offset)));
8048 
8049 		  splay_tree_node n
8050 		    = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
8051 		  bool ptr = (OMP_CLAUSE_MAP_KIND (c)
8052 			      == GOMP_MAP_ALWAYS_POINTER);
8053 		  if (n == NULL || (n->value & GOVD_MAP) == 0)
8054 		    {
8055 		      tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8056 						 OMP_CLAUSE_MAP);
8057 		      OMP_CLAUSE_SET_MAP_KIND (l, GOMP_MAP_STRUCT);
8058 		      if (orig_base != base)
8059 			OMP_CLAUSE_DECL (l) = unshare_expr (orig_base);
8060 		      else
8061 			OMP_CLAUSE_DECL (l) = decl;
8062 		      OMP_CLAUSE_SIZE (l) = size_int (1);
8063 		      if (struct_map_to_clause == NULL)
8064 			struct_map_to_clause = new hash_map<tree, tree>;
8065 		      struct_map_to_clause->put (decl, l);
8066 		      if (ptr)
8067 			{
8068 			  enum gomp_map_kind mkind
8069 			    = code == OMP_TARGET_EXIT_DATA
8070 			      ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
8071 			  tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8072 						      OMP_CLAUSE_MAP);
8073 			  OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8074 			  OMP_CLAUSE_DECL (c2)
8075 			    = unshare_expr (OMP_CLAUSE_DECL (c));
8076 			  OMP_CLAUSE_CHAIN (c2) = *prev_list_p;
8077 			  OMP_CLAUSE_SIZE (c2)
8078 			    = TYPE_SIZE_UNIT (ptr_type_node);
8079 			  OMP_CLAUSE_CHAIN (l) = c2;
8080 			  if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
8081 			    {
8082 			      tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
8083 			      tree c3
8084 				= build_omp_clause (OMP_CLAUSE_LOCATION (c),
8085 						    OMP_CLAUSE_MAP);
8086 			      OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8087 			      OMP_CLAUSE_DECL (c3)
8088 				= unshare_expr (OMP_CLAUSE_DECL (c4));
8089 			      OMP_CLAUSE_SIZE (c3)
8090 				= TYPE_SIZE_UNIT (ptr_type_node);
8091 			      OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
8092 			      OMP_CLAUSE_CHAIN (c2) = c3;
8093 			    }
8094 			  *prev_list_p = l;
8095 			  prev_list_p = NULL;
8096 			}
8097 		      else
8098 			{
8099 			  OMP_CLAUSE_CHAIN (l) = c;
8100 			  *list_p = l;
8101 			  list_p = &OMP_CLAUSE_CHAIN (l);
8102 			}
8103 		      if (orig_base != base && code == OMP_TARGET)
8104 			{
8105 			  tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8106 						      OMP_CLAUSE_MAP);
8107 			  enum gomp_map_kind mkind
8108 			    = GOMP_MAP_FIRSTPRIVATE_REFERENCE;
8109 			  OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8110 			  OMP_CLAUSE_DECL (c2) = decl;
8111 			  OMP_CLAUSE_SIZE (c2) = size_zero_node;
8112 			  OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
8113 			  OMP_CLAUSE_CHAIN (l) = c2;
8114 			}
8115 		      flags = GOVD_MAP | GOVD_EXPLICIT;
8116 		      if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
8117 			flags |= GOVD_SEEN;
8118 		      goto do_add_decl;
8119 		    }
8120 		  else
8121 		    {
8122 		      tree *osc = struct_map_to_clause->get (decl);
8123 		      tree *sc = NULL, *scp = NULL;
8124 		      if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
8125 			n->value |= GOVD_SEEN;
8126 		      poly_offset_int o1, o2;
8127 		      if (offset)
8128 			o1 = wi::to_poly_offset (offset);
8129 		      else
8130 			o1 = 0;
8131 		      if (maybe_ne (bitpos, 0))
8132 			o1 += bits_to_bytes_round_down (bitpos);
8133 		      sc = &OMP_CLAUSE_CHAIN (*osc);
8134 		      if (*sc != c
8135 			  && (OMP_CLAUSE_MAP_KIND (*sc)
8136 			      == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
8137 			sc = &OMP_CLAUSE_CHAIN (*sc);
8138 		      for (; *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc))
8139 			if (ptr && sc == prev_list_p)
8140 			  break;
8141 			else if (TREE_CODE (OMP_CLAUSE_DECL (*sc))
8142 				 != COMPONENT_REF
8143 				 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
8144 				     != INDIRECT_REF)
8145 				 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
8146 				     != ARRAY_REF))
8147 			  break;
8148 			else
8149 			  {
8150 			    tree offset2;
8151 			    poly_int64 bitsize2, bitpos2;
8152 			    base = OMP_CLAUSE_DECL (*sc);
8153 			    if (TREE_CODE (base) == ARRAY_REF)
8154 			      {
8155 				while (TREE_CODE (base) == ARRAY_REF)
8156 				  base = TREE_OPERAND (base, 0);
8157 				if (TREE_CODE (base) != COMPONENT_REF
8158 				    || (TREE_CODE (TREE_TYPE (base))
8159 					!= ARRAY_TYPE))
8160 				  break;
8161 			      }
8162 			    else if (TREE_CODE (base) == INDIRECT_REF
8163 				     && (TREE_CODE (TREE_OPERAND (base, 0))
8164 					 == COMPONENT_REF)
8165 				     && (TREE_CODE (TREE_TYPE
8166 						     (TREE_OPERAND (base, 0)))
8167 					 == REFERENCE_TYPE))
8168 			      base = TREE_OPERAND (base, 0);
8169 			    base = get_inner_reference (base, &bitsize2,
8170 							&bitpos2, &offset2,
8171 							&mode, &unsignedp,
8172 							&reversep, &volatilep);
8173 			    if ((TREE_CODE (base) == INDIRECT_REF
8174 				 || (TREE_CODE (base) == MEM_REF
8175 				     && integer_zerop (TREE_OPERAND (base,
8176 								     1))))
8177 				&& DECL_P (TREE_OPERAND (base, 0))
8178 				&& (TREE_CODE (TREE_TYPE (TREE_OPERAND (base,
8179 									0)))
8180 				    == REFERENCE_TYPE))
8181 			      base = TREE_OPERAND (base, 0);
8182 			    if (base != decl)
8183 			      break;
8184 			    if (scp)
8185 			      continue;
8186 			    gcc_assert (offset2 == NULL_TREE
8187 					|| poly_int_tree_p (offset2));
8188 			    tree d1 = OMP_CLAUSE_DECL (*sc);
8189 			    tree d2 = OMP_CLAUSE_DECL (c);
8190 			    while (TREE_CODE (d1) == ARRAY_REF)
8191 			      d1 = TREE_OPERAND (d1, 0);
8192 			    while (TREE_CODE (d2) == ARRAY_REF)
8193 			      d2 = TREE_OPERAND (d2, 0);
8194 			    if (TREE_CODE (d1) == INDIRECT_REF)
8195 			      d1 = TREE_OPERAND (d1, 0);
8196 			    if (TREE_CODE (d2) == INDIRECT_REF)
8197 			      d2 = TREE_OPERAND (d2, 0);
8198 			    while (TREE_CODE (d1) == COMPONENT_REF)
8199 			      if (TREE_CODE (d2) == COMPONENT_REF
8200 				  && TREE_OPERAND (d1, 1)
8201 				     == TREE_OPERAND (d2, 1))
8202 				{
8203 				  d1 = TREE_OPERAND (d1, 0);
8204 				  d2 = TREE_OPERAND (d2, 0);
8205 				}
8206 			      else
8207 				break;
8208 			    if (d1 == d2)
8209 			      {
8210 				error_at (OMP_CLAUSE_LOCATION (c),
8211 					  "%qE appears more than once in map "
8212 					  "clauses", OMP_CLAUSE_DECL (c));
8213 				remove = true;
8214 				break;
8215 			      }
8216 			    if (offset2)
8217 			      o2 = wi::to_poly_offset (offset2);
8218 			    else
8219 			      o2 = 0;
8220 			    o2 += bits_to_bytes_round_down (bitpos2);
8221 			    if (maybe_lt (o1, o2)
8222 				|| (known_eq (o1, o2)
8223 				    && maybe_lt (bitpos, bitpos2)))
8224 			      {
8225 				if (ptr)
8226 				  scp = sc;
8227 				else
8228 				  break;
8229 			      }
8230 			  }
8231 		      if (remove)
8232 			break;
8233 		      OMP_CLAUSE_SIZE (*osc)
8234 			= size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc),
8235 				      size_one_node);
8236 		      if (ptr)
8237 			{
8238 			  tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8239 						      OMP_CLAUSE_MAP);
8240 			  tree cl = NULL_TREE;
8241 			  enum gomp_map_kind mkind
8242 			    = code == OMP_TARGET_EXIT_DATA
8243 			      ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
8244 			  OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8245 			  OMP_CLAUSE_DECL (c2)
8246 			    = unshare_expr (OMP_CLAUSE_DECL (c));
8247 			  OMP_CLAUSE_CHAIN (c2) = scp ? *scp : *prev_list_p;
8248 			  OMP_CLAUSE_SIZE (c2)
8249 			    = TYPE_SIZE_UNIT (ptr_type_node);
8250 			  cl = scp ? *prev_list_p : c2;
8251 			  if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
8252 			    {
8253 			      tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
8254 			      tree c3
8255 				= build_omp_clause (OMP_CLAUSE_LOCATION (c),
8256 						    OMP_CLAUSE_MAP);
8257 			      OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8258 			      OMP_CLAUSE_DECL (c3)
8259 				= unshare_expr (OMP_CLAUSE_DECL (c4));
8260 			      OMP_CLAUSE_SIZE (c3)
8261 				= TYPE_SIZE_UNIT (ptr_type_node);
8262 			      OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
8263 			      if (!scp)
8264 				OMP_CLAUSE_CHAIN (c2) = c3;
8265 			      else
8266 				cl = c3;
8267 			    }
8268 			  if (scp)
8269 			    *scp = c2;
8270 			  if (sc == prev_list_p)
8271 			    {
8272 			      *sc = cl;
8273 			      prev_list_p = NULL;
8274 			    }
8275 			  else
8276 			    {
8277 			      *prev_list_p = OMP_CLAUSE_CHAIN (c);
8278 			      list_p = prev_list_p;
8279 			      prev_list_p = NULL;
8280 			      OMP_CLAUSE_CHAIN (c) = *sc;
8281 			      *sc = cl;
8282 			      continue;
8283 			    }
8284 			}
8285 		      else if (*sc != c)
8286 			{
8287 			  *list_p = OMP_CLAUSE_CHAIN (c);
8288 			  OMP_CLAUSE_CHAIN (c) = *sc;
8289 			  *sc = c;
8290 			  continue;
8291 			}
8292 		    }
8293 		}
8294 	      if (!remove
8295 		  && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
8296 		  && OMP_CLAUSE_CHAIN (c)
8297 		  && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
8298 		  && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8299 		      == GOMP_MAP_ALWAYS_POINTER))
8300 		prev_list_p = list_p;
8301 	      break;
8302 	    }
8303 	  flags = GOVD_MAP | GOVD_EXPLICIT;
8304 	  if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
8305 	      || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
8306 	    flags |= GOVD_MAP_ALWAYS_TO;
8307 	  goto do_add;
8308 
8309 	case OMP_CLAUSE_DEPEND:
8310 	  if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
8311 	    {
8312 	      tree deps = OMP_CLAUSE_DECL (c);
8313 	      while (deps && TREE_CODE (deps) == TREE_LIST)
8314 		{
8315 		  if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
8316 		      && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
8317 		    gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
8318 				   pre_p, NULL, is_gimple_val, fb_rvalue);
8319 		  deps = TREE_CHAIN (deps);
8320 		}
8321 	      break;
8322 	    }
8323 	  else if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
8324 	    break;
8325 	  if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8326 	    {
8327 	      gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8328 			     NULL, is_gimple_val, fb_rvalue);
8329 	      OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8330 	    }
8331 	  if (error_operand_p (OMP_CLAUSE_DECL (c)))
8332 	    {
8333 	      remove = true;
8334 	      break;
8335 	    }
8336 	  OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
8337 	  if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8338 			     is_gimple_val, fb_rvalue) == GS_ERROR)
8339 	    {
8340 	      remove = true;
8341 	      break;
8342 	    }
8343 	  break;
8344 
8345 	case OMP_CLAUSE_TO:
8346 	case OMP_CLAUSE_FROM:
8347 	case OMP_CLAUSE__CACHE_:
8348 	  decl = OMP_CLAUSE_DECL (c);
8349 	  if (error_operand_p (decl))
8350 	    {
8351 	      remove = true;
8352 	      break;
8353 	    }
8354 	  if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8355 	    OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
8356 				  : TYPE_SIZE_UNIT (TREE_TYPE (decl));
8357 	  if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
8358 			     NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
8359 	    {
8360 	      remove = true;
8361 	      break;
8362 	    }
8363 	  if (!DECL_P (decl))
8364 	    {
8365 	      if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
8366 				 NULL, is_gimple_lvalue, fb_lvalue)
8367 		  == GS_ERROR)
8368 		{
8369 		  remove = true;
8370 		  break;
8371 		}
8372 	      break;
8373 	    }
8374 	  goto do_notice;
8375 
8376 	case OMP_CLAUSE_USE_DEVICE_PTR:
8377 	  flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8378 	  goto do_add;
8379 	case OMP_CLAUSE_IS_DEVICE_PTR:
8380 	  flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8381 	  goto do_add;
8382 
8383 	do_add:
8384 	  decl = OMP_CLAUSE_DECL (c);
8385 	do_add_decl:
8386 	  if (error_operand_p (decl))
8387 	    {
8388 	      remove = true;
8389 	      break;
8390 	    }
8391 	  if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
8392 	    {
8393 	      tree t = omp_member_access_dummy_var (decl);
8394 	      if (t)
8395 		{
8396 		  tree v = DECL_VALUE_EXPR (decl);
8397 		  DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
8398 		  if (outer_ctx)
8399 		    omp_notice_variable (outer_ctx, t, true);
8400 		}
8401 	    }
8402 	  if (code == OACC_DATA
8403 	      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
8404 	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
8405 	    flags |= GOVD_MAP_0LEN_ARRAY;
8406 	  omp_add_variable (ctx, decl, flags);
8407 	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
8408 	      && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8409 	    {
8410 	      omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
8411 				GOVD_LOCAL | GOVD_SEEN);
8412 	      if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
8413 		  && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
8414 				find_decl_expr,
8415 				OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
8416 				NULL) == NULL_TREE)
8417 		omp_add_variable (ctx,
8418 				  OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
8419 				  GOVD_LOCAL | GOVD_SEEN);
8420 	      gimplify_omp_ctxp = ctx;
8421 	      push_gimplify_context ();
8422 
8423 	      OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
8424 	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8425 
8426 	      gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
8427 		  		&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
8428 	      pop_gimplify_context
8429 		(gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
8430 	      push_gimplify_context ();
8431 	      gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
8432 		  		&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8433 	      pop_gimplify_context
8434 		(gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
8435 	      OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
8436 	      OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
8437 
8438 	      gimplify_omp_ctxp = outer_ctx;
8439 	    }
8440 	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
8441 		   && OMP_CLAUSE_LASTPRIVATE_STMT (c))
8442 	    {
8443 	      gimplify_omp_ctxp = ctx;
8444 	      push_gimplify_context ();
8445 	      if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
8446 		{
8447 		  tree bind = build3 (BIND_EXPR, void_type_node, NULL,
8448 				      NULL, NULL);
8449 		  TREE_SIDE_EFFECTS (bind) = 1;
8450 		  BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
8451 		  OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
8452 		}
8453 	      gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
8454 				&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
8455 	      pop_gimplify_context
8456 		(gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
8457 	      OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
8458 
8459 	      gimplify_omp_ctxp = outer_ctx;
8460 	    }
8461 	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8462 		   && OMP_CLAUSE_LINEAR_STMT (c))
8463 	    {
8464 	      gimplify_omp_ctxp = ctx;
8465 	      push_gimplify_context ();
8466 	      if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
8467 		{
8468 		  tree bind = build3 (BIND_EXPR, void_type_node, NULL,
8469 				      NULL, NULL);
8470 		  TREE_SIDE_EFFECTS (bind) = 1;
8471 		  BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
8472 		  OMP_CLAUSE_LINEAR_STMT (c) = bind;
8473 		}
8474 	      gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
8475 				&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
8476 	      pop_gimplify_context
8477 		(gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
8478 	      OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
8479 
8480 	      gimplify_omp_ctxp = outer_ctx;
8481 	    }
8482 	  if (notice_outer)
8483 	    goto do_notice;
8484 	  break;
8485 
8486 	case OMP_CLAUSE_COPYIN:
8487 	case OMP_CLAUSE_COPYPRIVATE:
8488 	  decl = OMP_CLAUSE_DECL (c);
8489 	  if (error_operand_p (decl))
8490 	    {
8491 	      remove = true;
8492 	      break;
8493 	    }
8494 	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
8495 	      && !remove
8496 	      && !omp_check_private (ctx, decl, true))
8497 	    {
8498 	      remove = true;
8499 	      if (is_global_var (decl))
8500 		{
8501 		  if (DECL_THREAD_LOCAL_P (decl))
8502 		    remove = false;
8503 		  else if (DECL_HAS_VALUE_EXPR_P (decl))
8504 		    {
8505 		      tree value = get_base_address (DECL_VALUE_EXPR (decl));
8506 
8507 		      if (value
8508 			  && DECL_P (value)
8509 			  && DECL_THREAD_LOCAL_P (value))
8510 			remove = false;
8511 		    }
8512 		}
8513 	      if (remove)
8514 		error_at (OMP_CLAUSE_LOCATION (c),
8515 			  "copyprivate variable %qE is not threadprivate"
8516 			  " or private in outer context", DECL_NAME (decl));
8517 	    }
8518 	do_notice:
8519 	  if (outer_ctx)
8520 	    omp_notice_variable (outer_ctx, decl, true);
8521 	  if (check_non_private
8522 	      && region_type == ORT_WORKSHARE
8523 	      && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
8524 		  || decl == OMP_CLAUSE_DECL (c)
8525 		  || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
8526 		      && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8527 			  == ADDR_EXPR
8528 			  || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8529 			      == POINTER_PLUS_EXPR
8530 			      && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
8531 						(OMP_CLAUSE_DECL (c), 0), 0))
8532 				  == ADDR_EXPR)))))
8533 	      && omp_check_private (ctx, decl, false))
8534 	    {
8535 	      error ("%s variable %qE is private in outer context",
8536 		     check_non_private, DECL_NAME (decl));
8537 	      remove = true;
8538 	    }
8539 	  break;
8540 
8541 	case OMP_CLAUSE_IF:
8542 	  if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
8543 	      && OMP_CLAUSE_IF_MODIFIER (c) != code)
8544 	    {
8545 	      const char *p[2];
8546 	      for (int i = 0; i < 2; i++)
8547 		switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
8548 		  {
8549 		  case OMP_PARALLEL: p[i] = "parallel"; break;
8550 		  case OMP_TASK: p[i] = "task"; break;
8551 		  case OMP_TASKLOOP: p[i] = "taskloop"; break;
8552 		  case OMP_TARGET_DATA: p[i] = "target data"; break;
8553 		  case OMP_TARGET: p[i] = "target"; break;
8554 		  case OMP_TARGET_UPDATE: p[i] = "target update"; break;
8555 		  case OMP_TARGET_ENTER_DATA:
8556 		    p[i] = "target enter data"; break;
8557 		  case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
8558 		  default: gcc_unreachable ();
8559 		  }
8560 	      error_at (OMP_CLAUSE_LOCATION (c),
8561 			"expected %qs %<if%> clause modifier rather than %qs",
8562 			p[0], p[1]);
8563 	      remove = true;
8564 	    }
8565 	  /* Fall through.  */
8566 
8567 	case OMP_CLAUSE_FINAL:
8568 	  OMP_CLAUSE_OPERAND (c, 0)
8569 	    = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
8570 	  /* Fall through.  */
8571 
8572 	case OMP_CLAUSE_SCHEDULE:
8573 	case OMP_CLAUSE_NUM_THREADS:
8574 	case OMP_CLAUSE_NUM_TEAMS:
8575 	case OMP_CLAUSE_THREAD_LIMIT:
8576 	case OMP_CLAUSE_DIST_SCHEDULE:
8577 	case OMP_CLAUSE_DEVICE:
8578 	case OMP_CLAUSE_PRIORITY:
8579 	case OMP_CLAUSE_GRAINSIZE:
8580 	case OMP_CLAUSE_NUM_TASKS:
8581 	case OMP_CLAUSE_HINT:
8582 	case OMP_CLAUSE_ASYNC:
8583 	case OMP_CLAUSE_WAIT:
8584 	case OMP_CLAUSE_NUM_GANGS:
8585 	case OMP_CLAUSE_NUM_WORKERS:
8586 	case OMP_CLAUSE_VECTOR_LENGTH:
8587 	case OMP_CLAUSE_WORKER:
8588 	case OMP_CLAUSE_VECTOR:
8589 	  if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
8590 			     is_gimple_val, fb_rvalue) == GS_ERROR)
8591 	    remove = true;
8592 	  break;
8593 
8594 	case OMP_CLAUSE_GANG:
8595 	  if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
8596 			     is_gimple_val, fb_rvalue) == GS_ERROR)
8597 	    remove = true;
8598 	  if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
8599 			     is_gimple_val, fb_rvalue) == GS_ERROR)
8600 	    remove = true;
8601 	  break;
8602 
8603 	case OMP_CLAUSE_NOWAIT:
8604 	case OMP_CLAUSE_ORDERED:
8605 	case OMP_CLAUSE_UNTIED:
8606 	case OMP_CLAUSE_COLLAPSE:
8607 	case OMP_CLAUSE_TILE:
8608 	case OMP_CLAUSE_AUTO:
8609 	case OMP_CLAUSE_SEQ:
8610 	case OMP_CLAUSE_INDEPENDENT:
8611 	case OMP_CLAUSE_MERGEABLE:
8612 	case OMP_CLAUSE_PROC_BIND:
8613 	case OMP_CLAUSE_SAFELEN:
8614 	case OMP_CLAUSE_SIMDLEN:
8615 	case OMP_CLAUSE_NOGROUP:
8616 	case OMP_CLAUSE_THREADS:
8617 	case OMP_CLAUSE_SIMD:
8618 	  break;
8619 
8620 	case OMP_CLAUSE_DEFAULTMAP:
8621 	  ctx->target_map_scalars_firstprivate = false;
8622 	  break;
8623 
8624 	case OMP_CLAUSE_ALIGNED:
8625 	  decl = OMP_CLAUSE_DECL (c);
8626 	  if (error_operand_p (decl))
8627 	    {
8628 	      remove = true;
8629 	      break;
8630 	    }
8631 	  if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
8632 			     is_gimple_val, fb_rvalue) == GS_ERROR)
8633 	    {
8634 	      remove = true;
8635 	      break;
8636 	    }
8637 	  if (!is_global_var (decl)
8638 	      && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
8639 	    omp_add_variable (ctx, decl, GOVD_ALIGNED);
8640 	  break;
8641 
8642 	case OMP_CLAUSE_DEFAULT:
8643 	  ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
8644 	  break;
8645 
8646 	default:
8647 	  gcc_unreachable ();
8648 	}
8649 
8650       if (code == OACC_DATA
8651 	  && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
8652 	  && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
8653 	remove = true;
8654       if (remove)
8655 	*list_p = OMP_CLAUSE_CHAIN (c);
8656       else
8657 	list_p = &OMP_CLAUSE_CHAIN (c);
8658     }
8659 
8660   gimplify_omp_ctxp = ctx;
8661   if (struct_map_to_clause)
8662     delete struct_map_to_clause;
8663 }
8664 
8665 /* Return true if DECL is a candidate for shared to firstprivate
8666    optimization.  We only consider non-addressable scalars, not
8667    too big, and not references.  */
8668 
8669 static bool
8670 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
8671 {
8672   if (TREE_ADDRESSABLE (decl))
8673     return false;
8674   tree type = TREE_TYPE (decl);
8675   if (!is_gimple_reg_type (type)
8676       || TREE_CODE (type) == REFERENCE_TYPE
8677       || TREE_ADDRESSABLE (type))
8678     return false;
8679   /* Don't optimize too large decls, as each thread/task will have
8680      its own.  */
8681   HOST_WIDE_INT len = int_size_in_bytes (type);
8682   if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
8683     return false;
8684   if (lang_hooks.decls.omp_privatize_by_reference (decl))
8685     return false;
8686   return true;
8687 }
8688 
8689 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
8690    For omp_shared_to_firstprivate_optimizable_decl_p decl mark it as
8691    GOVD_WRITTEN in outer contexts.  */
8692 
8693 static void
8694 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
8695 {
8696   for (; ctx; ctx = ctx->outer_context)
8697     {
8698       splay_tree_node n = splay_tree_lookup (ctx->variables,
8699 					     (splay_tree_key) decl);
8700       if (n == NULL)
8701 	continue;
8702       else if (n->value & GOVD_SHARED)
8703 	{
8704 	  n->value |= GOVD_WRITTEN;
8705 	  return;
8706 	}
8707       else if (n->value & GOVD_DATA_SHARE_CLASS)
8708 	return;
8709     }
8710 }
8711 
8712 /* Helper callback for walk_gimple_seq to discover possible stores
8713    to omp_shared_to_firstprivate_optimizable_decl_p decls and set
8714    GOVD_WRITTEN if they are GOVD_SHARED in some outer context
8715    for those.  */
8716 
8717 static tree
8718 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
8719 {
8720   struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
8721 
8722   *walk_subtrees = 0;
8723   if (!wi->is_lhs)
8724     return NULL_TREE;
8725 
8726   tree op = *tp;
8727   do
8728     {
8729       if (handled_component_p (op))
8730 	op = TREE_OPERAND (op, 0);
8731       else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
8732 	       && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
8733 	op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
8734       else
8735 	break;
8736     }
8737   while (1);
8738   if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
8739     return NULL_TREE;
8740 
8741   omp_mark_stores (gimplify_omp_ctxp, op);
8742   return NULL_TREE;
8743 }
8744 
8745 /* Helper callback for walk_gimple_seq to discover possible stores
8746    to omp_shared_to_firstprivate_optimizable_decl_p decls and set
8747    GOVD_WRITTEN if they are GOVD_SHARED in some outer context
8748    for those.  */
8749 
8750 static tree
8751 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
8752 		      bool *handled_ops_p,
8753 		      struct walk_stmt_info *wi)
8754 {
8755   gimple *stmt = gsi_stmt (*gsi_p);
8756   switch (gimple_code (stmt))
8757     {
8758     /* Don't recurse on OpenMP constructs for which
8759        gimplify_adjust_omp_clauses already handled the bodies,
8760        except handle gimple_omp_for_pre_body.  */
8761     case GIMPLE_OMP_FOR:
8762       *handled_ops_p = true;
8763       if (gimple_omp_for_pre_body (stmt))
8764 	walk_gimple_seq (gimple_omp_for_pre_body (stmt),
8765 			 omp_find_stores_stmt, omp_find_stores_op, wi);
8766       break;
8767     case GIMPLE_OMP_PARALLEL:
8768     case GIMPLE_OMP_TASK:
8769     case GIMPLE_OMP_SECTIONS:
8770     case GIMPLE_OMP_SINGLE:
8771     case GIMPLE_OMP_TARGET:
8772     case GIMPLE_OMP_TEAMS:
8773     case GIMPLE_OMP_CRITICAL:
8774       *handled_ops_p = true;
8775       break;
8776     default:
8777       break;
8778     }
8779   return NULL_TREE;
8780 }
8781 
/* Data passed via the void * argument to the splay-tree callback
   gimplify_adjust_omp_clauses_1.  */

struct gimplify_adjust_omp_clauses_data
{
  tree *list_p;		/* Head of the clause chain being built.  */
  gimple_seq *pre_p;	/* Sequence receiving statements produced while
			   gimplifying clause operands.  */
};
8787 
/* Splay-tree foreach callback: for each variable noticed in the context,
   synthesize the implicit data-sharing/map clause its GOVD_* flags call
   for and prepend it to the clause list, or do nothing for variables
   that were not actually used within the context (so their implicit
   PRIVATE, SHARED, and FIRSTPRIVATE clauses are effectively removed).
   N->key is the decl, N->value its GOVD_* flag set; DATA is a
   gimplify_adjust_omp_clauses_data.  Always returns 0 so the
   traversal continues.  */

static int
gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
{
  tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
  gimple_seq *pre_p
    = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
  tree decl = (tree) n->key;
  unsigned flags = n->value;
  enum omp_clause_code code;
  tree clause;
  bool private_debug;

  /* Explicit clauses and context-local decls need no implicit clause.  */
  if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
    return 0;
  /* Variables never actually seen in the region get no clause at all.  */
  if ((flags & GOVD_SEEN) == 0)
    return 0;
  if (flags & GOVD_DEBUG_PRIVATE)
    {
      gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
      private_debug = true;
    }
  else if (flags & GOVD_MAP)
    private_debug = false;
  else
    /* Let the frontend decide whether to emit a debug-friendly PRIVATE
       clause instead of the real sharing class.  */
    private_debug
      = lang_hooks.decls.omp_private_debug_clause (decl,
						   !!(flags & GOVD_SHARED));
  /* Translate the GOVD_* flags into the clause code to build.  */
  if (private_debug)
    code = OMP_CLAUSE_PRIVATE;
  else if (flags & GOVD_MAP)
    {
      code = OMP_CLAUSE_MAP;
      /* _Atomic objects cannot be implicitly mapped (OpenMP only;
	 ORT_ACC regions are exempt).  */
      if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
	  && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
	{
	  error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
	  return 0;
	}
    }
  else if (flags & GOVD_SHARED)
    {
      if (is_global_var (decl))
	{
	  /* A global is only worth an explicit SHARED clause if some
	     enclosing context privatizes, reduces, or maps it.  */
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
	  while (ctx != NULL)
	    {
	      splay_tree_node on
		= splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
				      | GOVD_PRIVATE | GOVD_REDUCTION
				      | GOVD_LINEAR | GOVD_MAP)) != 0)
		break;
	      ctx = ctx->outer_context;
	    }
	  if (ctx == NULL)
	    return 0;
	}
      code = OMP_CLAUSE_SHARED;
    }
  else if (flags & GOVD_PRIVATE)
    code = OMP_CLAUSE_PRIVATE;
  else if (flags & GOVD_FIRSTPRIVATE)
    {
      code = OMP_CLAUSE_FIRSTPRIVATE;
      /* Same _Atomic restriction as above, for implicit firstprivate
	 on target constructs.  */
      if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
	  && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
	  && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
	{
	  error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
		 "%<target%> construct", decl);
	  return 0;
	}
    }
  else if (flags & GOVD_LASTPRIVATE)
    code = OMP_CLAUSE_LASTPRIVATE;
  else if (flags & GOVD_ALIGNED)
    /* GOVD_ALIGNED alone needs no implicit clause.  */
    return 0;
  else
    gcc_unreachable ();

  /* If the decl is (or may be) written, propagate GOVD_WRITTEN to the
     context where it is shared, for the shared-to-firstprivate
     optimization.  */
  if (((flags & GOVD_LASTPRIVATE)
       || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
      && omp_shared_to_firstprivate_optimizable_decl_p (decl))
    omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);

  /* Build the clause and prepend it to the list; CHAIN remembers the
     old head so additional companion clauses can be chained before it.  */
  tree chain = *list_p;
  clause = build_omp_clause (input_location, code);
  OMP_CLAUSE_DECL (clause) = decl;
  OMP_CLAUSE_CHAIN (clause) = chain;
  if (private_debug)
    OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
  else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
    OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
  else if (code == OMP_CLAUSE_SHARED
	   && (flags & GOVD_WRITTEN) == 0
	   && omp_shared_to_firstprivate_optimizable_decl_p (decl))
    /* Never written: the runtime may firstprivatize it instead.  */
    OMP_CLAUSE_SHARED_READONLY (clause) = 1;
  else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
    OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
  else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
    {
      /* Zero-length array section: emit a GOMP_MAP_ALLOC of a MEM_REF
	 of the pointer plus a GOMP_MAP_FIRSTPRIVATE_POINTER companion
	 clause NC for the pointer itself.  */
      tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
      OMP_CLAUSE_DECL (nc) = decl;
      if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
	/* For a reference to pointer, map what the reference refers to.  */
	OMP_CLAUSE_DECL (clause)
	  = build_simple_mem_ref_loc (input_location, decl);
      OMP_CLAUSE_DECL (clause)
	= build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
		  build_int_cst (build_pointer_type (char_type_node), 0));
      OMP_CLAUSE_SIZE (clause) = size_zero_node;
      OMP_CLAUSE_SIZE (nc) = size_zero_node;
      OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
      OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
      OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
      OMP_CLAUSE_CHAIN (nc) = chain;
      OMP_CLAUSE_CHAIN (clause) = nc;
      /* Gimplify the pointer operand in the outer context, so temps go
	 outside the construct.  */
      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
      gimplify_omp_ctxp = ctx->outer_context;
      gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
		     pre_p, NULL, is_gimple_val, fb_rvalue);
      gimplify_omp_ctxp = ctx;
    }
  else if (code == OMP_CLAUSE_MAP)
    {
      int kind;
      /* Not all combinations of these GOVD_MAP flags are actually valid.  */
      switch (flags & (GOVD_MAP_TO_ONLY
		       | GOVD_MAP_FORCE
		       | GOVD_MAP_FORCE_PRESENT))
	{
	case 0:
	  kind = GOMP_MAP_TOFROM;
	  break;
	case GOVD_MAP_FORCE:
	  kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
	  break;
	case GOVD_MAP_TO_ONLY:
	  kind = GOMP_MAP_TO;
	  break;
	case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
	  kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
	  break;
	case GOVD_MAP_FORCE_PRESENT:
	  kind = GOMP_MAP_FORCE_PRESENT;
	  break;
	default:
	  gcc_unreachable ();
	}
      OMP_CLAUSE_SET_MAP_KIND (clause, kind);
      if (DECL_SIZE (decl)
	  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	{
	  /* Variable-sized decl (e.g. VLA): it lives behind a pointer
	     recorded in its DECL_VALUE_EXPR.  Map the pointed-to data
	     and add a companion pointer clause NC.  */
	  tree decl2 = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	  decl2 = TREE_OPERAND (decl2, 0);
	  gcc_assert (DECL_P (decl2));
	  tree mem = build_simple_mem_ref (decl2);
	  OMP_CLAUSE_DECL (clause) = mem;
	  OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	  if (gimplify_omp_ctxp->outer_context)
	    {
	      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
	      omp_notice_variable (ctx, decl2, true);
	      omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
	    }
	  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
				      OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (nc) = decl;
	  OMP_CLAUSE_SIZE (nc) = size_zero_node;
	  if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
	    OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
	  else
	    OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
	  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
	  OMP_CLAUSE_CHAIN (clause) = nc;
	}
      else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
	       && lang_hooks.decls.omp_privatize_by_reference (decl))
	{
	  /* Reference-typed decl: map the referenced object and add a
	     GOMP_MAP_FIRSTPRIVATE_REFERENCE clause for the reference.  */
	  OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
	  OMP_CLAUSE_SIZE (clause)
	    = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
	  /* Gimplify the size in the outer context.  */
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  gimplify_omp_ctxp = ctx->outer_context;
	  gimplify_expr (&OMP_CLAUSE_SIZE (clause),
			 pre_p, NULL, is_gimple_val, fb_rvalue);
	  gimplify_omp_ctxp = ctx;
	  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
				      OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (nc) = decl;
	  OMP_CLAUSE_SIZE (nc) = size_zero_node;
	  OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
	  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
	  OMP_CLAUSE_CHAIN (clause) = nc;
	}
      else
	OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
    }
  if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
    {
      /* Variable is both firstprivate and lastprivate: add the matching
	 LASTPRIVATE clause with the firstprivate flag set.  */
      tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
      OMP_CLAUSE_DECL (nc) = decl;
      OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
      OMP_CLAUSE_CHAIN (nc) = chain;
      OMP_CLAUSE_CHAIN (clause) = nc;
      /* Let the frontend finalize the clause in the outer context.  */
      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
      gimplify_omp_ctxp = ctx->outer_context;
      lang_hooks.decls.omp_finish_clause (nc, pre_p);
      gimplify_omp_ctxp = ctx;
    }
  *list_p = clause;
  /* Finalize the main clause in the outer context too, then notice the
     size decls of any freshly added MAP clauses there.  */
  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
  gimplify_omp_ctxp = ctx->outer_context;
  lang_hooks.decls.omp_finish_clause (clause, pre_p);
  if (gimplify_omp_ctxp)
    for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
      if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
	  && DECL_P (OMP_CLAUSE_SIZE (clause)))
	omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
			     true);
  gimplify_omp_ctxp = ctx;
  return 0;
}
9015 
/* Post-gimplification fixup of the OMP clauses in *LIST_P for the
   construct identified by CODE.  Walks the clause chain removing
   clauses for variables that turned out to be unused in the region,
   adjusting others (e.g. rewriting MAP/TO/FROM clauses for
   variable-sized decls), then appends the implicit data-sharing
   clauses recorded in the current gimplify OMP context.  BODY, if
   non-NULL, is the already-gimplified body, scanned here for stores
   feeding the shared->firstprivate optimization.  PRE_P receives any
   statements emitted while finalizing clauses.  Pops and deletes the
   current gimplify OMP context before returning.  */

static void
gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
			     enum tree_code code)
{
  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
  tree c, decl;

  if (body)
    {
      /* Find the innermost enclosing parallel/task/teams region and
	 record which variables the body writes, so that SHARED clauses
	 on never-written variables can be marked read-only below.  */
      struct gimplify_omp_ctx *octx;
      for (octx = ctx; octx; octx = octx->outer_context)
	if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
	  break;
      if (octx)
	{
	  struct walk_stmt_info wi;
	  memset (&wi, 0, sizeof (wi));
	  walk_gimple_seq (body, omp_find_stores_stmt,
			   omp_find_stores_op, &wi);
	}
    }

  if (ctx->add_safelen1)
    {
      /* If there are VLAs in the body of simd loop, prevent
	 vectorization.  */
      gcc_assert (ctx->region_type == ORT_SIMD);
      c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
      OMP_CLAUSE_SAFELEN_EXPR (c) = integer_one_node;
      OMP_CLAUSE_CHAIN (c) = *list_p;
      *list_p = c;
      list_p = &OMP_CLAUSE_CHAIN (c);
    }

  /* Walk the clause chain; LIST_P always points at the link that holds
     C, so removal is done by rewriting *LIST_P to skip C.  */
  while ((c = *list_p) != NULL)
    {
      splay_tree_node n;
      bool remove = false;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_FIRSTPRIVATE:
	  /* _Atomic variables cannot be firstprivatized onto a
	     non-OpenACC target region.  */
	  if ((ctx->region_type & ORT_TARGET)
	      && (ctx->region_type & ORT_ACC) == 0
	      && TYPE_ATOMIC (strip_array_types
					(TREE_TYPE (OMP_CLAUSE_DECL (c)))))
	    {
	      error_at (OMP_CLAUSE_LOCATION (c),
			"%<_Atomic%> %qD in %<firstprivate%> clause on "
			"%<target%> construct", OMP_CLAUSE_DECL (c));
	      remove = true;
	      break;
	    }
	  /* FALLTHRU */
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  /* Drop the clause entirely if the variable was never
	     referenced inside the region.  */
	  remove = !(n->value & GOVD_SEEN);
	  if (! remove)
	    {
	      bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
	      /* For debug purposes the frontend may ask that the clause
		 be presented as PRIVATE with the debug flag set.  */
	      if ((n->value & GOVD_DEBUG_PRIVATE)
		  || lang_hooks.decls.omp_private_debug_clause (decl, shared))
		{
		  gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
			      || ((n->value & GOVD_DATA_SHARE_CLASS)
				  == GOVD_SHARED));
		  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
		  OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
		}
	      /* A shared variable never written in the region can be
		 treated as read-only (enables firstprivate optimization
		 downstream); otherwise propagate the store information
		 outward.  */
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		  && (n->value & GOVD_WRITTEN) == 0
		  && DECL_P (decl)
		  && omp_shared_to_firstprivate_optimizable_decl_p (decl))
		OMP_CLAUSE_SHARED_READONLY (c) = 1;
	      else if (DECL_P (decl)
		       && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
			    && (n->value & GOVD_WRITTEN) != 0)
			   || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
			       && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
		       && omp_shared_to_firstprivate_optimizable_decl_p (decl))
		omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
	     accurately reflect the presence of a FIRSTPRIVATE clause.  */
	  decl = OMP_CLAUSE_DECL (c);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
	    = (n->value & GOVD_FIRSTPRIVATE) != 0;
	  if (code == OMP_DISTRIBUTE
	      && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    {
	      remove = true;
	      error_at (OMP_CLAUSE_LOCATION (c),
			"same variable used in %<firstprivate%> and "
			"%<lastprivate%> clauses on %<distribute%> "
			"construct");
	    }
	  if (!remove
	      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && DECL_P (decl)
	      && omp_shared_to_firstprivate_optimizable_decl_p (decl))
	    omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (!is_global_var (decl))
	    {
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      remove = n == NULL || !(n->value & GOVD_SEEN);
	      if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
		{
		  struct gimplify_omp_ctx *octx;
		  if (n != NULL
		      && (n->value & (GOVD_DATA_SHARE_CLASS
				      & ~GOVD_FIRSTPRIVATE)))
		    remove = true;
		  else
		    for (octx = ctx->outer_context; octx;
			 octx = octx->outer_context)
		      {
			n = splay_tree_lookup (octx->variables,
					       (splay_tree_key) decl);
			if (n == NULL)
			  continue;
			if (n->value & GOVD_LOCAL)
			  break;
			/* We have to avoid assigning a shared variable
			   to itself when trying to add
			   __builtin_assume_aligned.  */
			if (n->value & GOVD_SHARED)
			  {
			    remove = true;
			    break;
			  }
		      }
		}
	    }
	  else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    {
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
		remove = true;
	    }
	  break;

	case OMP_CLAUSE_MAP:
	  /* An always-pointer map is meaningless on target exit data.  */
	  if (code == OMP_TARGET_EXIT_DATA
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
	    {
	      remove = true;
	      break;
	    }
	  decl = OMP_CLAUSE_DECL (c);
	  /* Data clauses associated with acc parallel reductions must be
	     compatible with present_or_copy.  Warn and adjust the clause
	     if that is not the case.  */
	  if (ctx->region_type == ORT_ACC_PARALLEL)
	    {
	      tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
	      n = NULL;

	      if (DECL_P (t))
		n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);

	      if (n && (n->value & GOVD_REDUCTION))
		{
		  enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);

		  OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
		  if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
		      && kind != GOMP_MAP_FORCE_PRESENT
		      && kind != GOMP_MAP_POINTER)
		    {
		      warning_at (OMP_CLAUSE_LOCATION (c), 0,
				  "incompatible data clause with reduction "
				  "on %qE; promoting to present_or_copy",
				  DECL_NAME (t));
		      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
		    }
		}
	    }
	  if (!DECL_P (decl))
	    {
	      /* Non-decl map operands (component refs etc.) are mostly
		 kept as-is; only firstprivate-pointer maps of unused
		 struct members are pruned here.  */
	      if ((ctx->region_type & ORT_TARGET) != 0
		  && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		{
		  if (TREE_CODE (decl) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE))
		    decl = TREE_OPERAND (decl, 0);
		  if (TREE_CODE (decl) == COMPONENT_REF)
		    {
		      /* Walk down to the base object of the access.  */
		      while (TREE_CODE (decl) == COMPONENT_REF)
			decl = TREE_OPERAND (decl, 0);
		      if (DECL_P (decl))
			{
			  n = splay_tree_lookup (ctx->variables,
						 (splay_tree_key) decl);
			  if (!(n->value & GOVD_SEEN))
			    remove = true;
			}
		    }
		}
	      break;
	    }
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  if ((ctx->region_type & ORT_TARGET) != 0
	      && !(n->value & GOVD_SEEN)
	      && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
	      && (!is_global_var (decl)
		  || !lookup_attribute ("omp declare target link",
					DECL_ATTRIBUTES (decl))))
	    {
	      remove = true;
	      /* For struct element mapping, if struct is never referenced
		 in target block and none of the mapping has always modifier,
		 remove all the struct element mappings, which immediately
		 follow the GOMP_MAP_STRUCT map clause.  */
	      if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
		{
		  HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
		  while (cnt--)
		    OMP_CLAUSE_CHAIN (c)
		      = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
		}
	    }
	  else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
		   && code == OMP_TARGET_EXIT_DATA)
	    remove = true;
	  else if (DECL_SIZE (decl)
		   && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
	    {
	      /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
		 for these, TREE_CODE (DECL_SIZE (decl)) will always be
		 INTEGER_CST.  */
	      gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);

	      /* Variable-sized decl: map the storage through the
		 DECL_VALUE_EXPR's base pointer instead, and add a
		 pointer map clause right after this one.  */
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      tree mem = build_simple_mem_ref (decl2);
	      OMP_CLAUSE_DECL (c) = mem;
	      OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	      if (ctx->outer_context)
		{
		  omp_notice_variable (ctx->outer_context, decl2, true);
		  omp_notice_variable (ctx->outer_context,
				       OMP_CLAUSE_SIZE (c), true);
		}
	      if (((ctx->region_type & ORT_TARGET) != 0
		   || !ctx->target_firstprivatize_array_bases)
		  && ((n->value & GOVD_SEEN) == 0
		      || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
		{
		  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
					      OMP_CLAUSE_MAP);
		  OMP_CLAUSE_DECL (nc) = decl;
		  OMP_CLAUSE_SIZE (nc) = size_zero_node;
		  if (ctx->target_firstprivatize_array_bases)
		    OMP_CLAUSE_SET_MAP_KIND (nc,
					     GOMP_MAP_FIRSTPRIVATE_POINTER);
		  else
		    OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
		  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
		  OMP_CLAUSE_CHAIN (c) = nc;
		  c = nc;
		}
	    }
	  else
	    {
	      if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
		OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	      gcc_assert ((n->value & GOVD_SEEN) == 0
			  || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
			      == 0));
	    }
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE__CACHE_:
	  decl = OMP_CLAUSE_DECL (c);
	  if (!DECL_P (decl))
	    break;
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      /* Same variable-sized-decl rewrite as in the MAP case
		 above, but without the extra pointer clause.  */
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      tree mem = build_simple_mem_ref (decl2);
	      OMP_CLAUSE_DECL (c) = mem;
	      OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	      if (ctx->outer_context)
		{
		  omp_notice_variable (ctx->outer_context, decl2, true);
		  omp_notice_variable (ctx->outer_context,
				       OMP_CLAUSE_SIZE (c), true);
		}
	    }
	  else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
	    OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	  break;

	case OMP_CLAUSE_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  /* OpenACC reductions need a present_or_copy data clause.
	     Add one if necessary.  Error is the reduction is private.  */
	  if (ctx->region_type == ORT_ACC_PARALLEL)
	    {
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
		error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
			  "reduction on %qE", DECL_NAME (decl));
	      else if ((n->value & GOVD_MAP) == 0)
		{
		  tree next = OMP_CLAUSE_CHAIN (c);
		  tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
		  OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
		  OMP_CLAUSE_DECL (nc) = decl;
		  OMP_CLAUSE_CHAIN (c) = nc;
		  lang_hooks.decls.omp_finish_clause (nc, pre_p);
		  /* omp_finish_clause may have appended further clauses;
		     mark all of them as in-reduction and re-link the
		     original chain tail.  */
		  while (1)
		    {
		      OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
		      if (OMP_CLAUSE_CHAIN (nc) == NULL)
			break;
		      nc = OMP_CLAUSE_CHAIN (nc);
		    }
		  OMP_CLAUSE_CHAIN (nc) = next;
		  n->value |= GOVD_MAP;
		}
	    }
	  if (DECL_P (decl)
	      && omp_shared_to_firstprivate_optimizable_decl_p (decl))
	    omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
	  break;
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_HINT:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	  /* These clauses need no adjustment here.  */
	  break;

	default:
	  gcc_unreachable ();
	}

      if (remove)
	*list_p = OMP_CLAUSE_CHAIN (c);
      else
	list_p = &OMP_CLAUSE_CHAIN (c);
    }

  /* Add in any implicit data sharing.  */
  struct gimplify_adjust_omp_clauses_data data;
  data.list_p = list_p;
  data.pre_p = pre_p;
  splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);

  gimplify_omp_ctxp = ctx->outer_context;
  delete_omp_context (ctx);
}
9430 
9431 /* Gimplify OACC_CACHE.  */
9432 
9433 static void
9434 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
9435 {
9436   tree expr = *expr_p;
9437 
9438   gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
9439 			     OACC_CACHE);
9440   gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
9441 			       OACC_CACHE);
9442 
9443   /* TODO: Do something sensible with this information.  */
9444 
9445   *expr_p = NULL_TREE;
9446 }
9447 
9448 /* Helper function of gimplify_oacc_declare.  The helper's purpose is to,
9449    if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit'
9450    kind.  The entry kind will replace the one in CLAUSE, while the exit
9451    kind will be used in a new omp_clause and returned to the caller.  */
9452 
9453 static tree
9454 gimplify_oacc_declare_1 (tree clause)
9455 {
9456   HOST_WIDE_INT kind, new_op;
9457   bool ret = false;
9458   tree c = NULL;
9459 
9460   kind = OMP_CLAUSE_MAP_KIND (clause);
9461 
9462   switch (kind)
9463     {
9464       case GOMP_MAP_ALLOC:
9465       case GOMP_MAP_FORCE_ALLOC:
9466       case GOMP_MAP_FORCE_TO:
9467 	new_op = GOMP_MAP_DELETE;
9468 	ret = true;
9469 	break;
9470 
9471       case GOMP_MAP_FORCE_FROM:
9472 	OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
9473 	new_op = GOMP_MAP_FORCE_FROM;
9474 	ret = true;
9475 	break;
9476 
9477       case GOMP_MAP_FORCE_TOFROM:
9478 	OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_TO);
9479 	new_op = GOMP_MAP_FORCE_FROM;
9480 	ret = true;
9481 	break;
9482 
9483       case GOMP_MAP_FROM:
9484 	OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
9485 	new_op = GOMP_MAP_FROM;
9486 	ret = true;
9487 	break;
9488 
9489       case GOMP_MAP_TOFROM:
9490 	OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
9491 	new_op = GOMP_MAP_FROM;
9492 	ret = true;
9493 	break;
9494 
9495       case GOMP_MAP_DEVICE_RESIDENT:
9496       case GOMP_MAP_FORCE_DEVICEPTR:
9497       case GOMP_MAP_FORCE_PRESENT:
9498       case GOMP_MAP_LINK:
9499       case GOMP_MAP_POINTER:
9500       case GOMP_MAP_TO:
9501 	break;
9502 
9503       default:
9504 	gcc_unreachable ();
9505 	break;
9506     }
9507 
9508   if (ret)
9509     {
9510       c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
9511       OMP_CLAUSE_SET_MAP_KIND (c, new_op);
9512       OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
9513     }
9514 
9515   return c;
9516 }
9517 
9518 /* Gimplify OACC_DECLARE.  */
9519 
static void
gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  gomp_target *stmt;
  tree clauses, t, decl;

  clauses = OACC_DECLARE_CLAUSES (expr);

  gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
  gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);

  for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
    {
      decl = OMP_CLAUSE_DECL (t);

      /* Look through an address-carrying MEM_REF to the base decl.  */
      if (TREE_CODE (decl) == MEM_REF)
	decl = TREE_OPERAND (decl, 0);

      /* Mark the variable as covered by an 'acc declare' directive so it
	 is not re-processed by later declare handling.  */
      if (VAR_P (decl) && !is_oacc_declared (decl))
	{
	  tree attr = get_identifier ("oacc declare target");
	  DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
					      DECL_ATTRIBUTES (decl));
	}

      /* For function-local variables, compute the matching 'exit' map
	 clause and stash it in OACC_DECLARE_RETURNS, keyed by the decl,
	 so it can be emitted when the function returns.  */
      if (VAR_P (decl)
	  && !is_global_var (decl)
	  && DECL_CONTEXT (decl) == current_function_decl)
	{
	  tree c = gimplify_oacc_declare_1 (t);
	  if (c)
	    {
	      if (oacc_declare_returns == NULL)
		oacc_declare_returns = new hash_map<tree, tree>;

	      oacc_declare_returns->put (decl, c);
	    }
	}

      if (gimplify_omp_ctxp)
	omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
    }

  /* Emit the declare directive itself as a bodyless target statement.  */
  stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
				  clauses);

  gimplify_seq_add_stmt (pre_p, stmt);

  *expr_p = NULL_TREE;
}
9571 
9572 /* Gimplify the contents of an OMP_PARALLEL statement.  This involves
9573    gimplification of the body, as well as scanning the body for used
9574    variables.  We need to do this scan now, because variable-sized
9575    decls will be decomposed during gimplification.  */
9576 
9577 static void
9578 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
9579 {
9580   tree expr = *expr_p;
9581   gimple *g;
9582   gimple_seq body = NULL;
9583 
9584   gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
9585 			     OMP_PARALLEL_COMBINED (expr)
9586 			     ? ORT_COMBINED_PARALLEL
9587 			     : ORT_PARALLEL, OMP_PARALLEL);
9588 
9589   push_gimplify_context ();
9590 
9591   g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
9592   if (gimple_code (g) == GIMPLE_BIND)
9593     pop_gimplify_context (g);
9594   else
9595     pop_gimplify_context (NULL);
9596 
9597   gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
9598 			       OMP_PARALLEL);
9599 
9600   g = gimple_build_omp_parallel (body,
9601 				 OMP_PARALLEL_CLAUSES (expr),
9602 				 NULL_TREE, NULL_TREE);
9603   if (OMP_PARALLEL_COMBINED (expr))
9604     gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
9605   gimplify_seq_add_stmt (pre_p, g);
9606   *expr_p = NULL_TREE;
9607 }
9608 
9609 /* Gimplify the contents of an OMP_TASK statement.  This involves
9610    gimplification of the body, as well as scanning the body for used
9611    variables.  We need to do this scan now, because variable-sized
9612    decls will be decomposed during gimplification.  */
9613 
9614 static void
9615 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
9616 {
9617   tree expr = *expr_p;
9618   gimple *g;
9619   gimple_seq body = NULL;
9620 
9621   gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
9622 			     omp_find_clause (OMP_TASK_CLAUSES (expr),
9623 					      OMP_CLAUSE_UNTIED)
9624 			     ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
9625 
9626   push_gimplify_context ();
9627 
9628   g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
9629   if (gimple_code (g) == GIMPLE_BIND)
9630     pop_gimplify_context (g);
9631   else
9632     pop_gimplify_context (NULL);
9633 
9634   gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
9635 			       OMP_TASK);
9636 
9637   g = gimple_build_omp_task (body,
9638 			     OMP_TASK_CLAUSES (expr),
9639 			     NULL_TREE, NULL_TREE,
9640 			     NULL_TREE, NULL_TREE, NULL_TREE);
9641   gimplify_seq_add_stmt (pre_p, g);
9642   *expr_p = NULL_TREE;
9643 }
9644 
9645 /* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD
9646    with non-NULL OMP_FOR_INIT.  */
9647 
9648 static tree
9649 find_combined_omp_for (tree *tp, int *walk_subtrees, void *)
9650 {
9651   *walk_subtrees = 0;
9652   switch (TREE_CODE (*tp))
9653     {
9654     case OMP_FOR:
9655       *walk_subtrees = 1;
9656       /* FALLTHRU */
9657     case OMP_SIMD:
9658       if (OMP_FOR_INIT (*tp) != NULL_TREE)
9659 	return *tp;
9660       break;
9661     case BIND_EXPR:
9662     case STATEMENT_LIST:
9663     case OMP_PARALLEL:
9664       *walk_subtrees = 1;
9665       break;
9666     default:
9667       break;
9668     }
9669   return NULL_TREE;
9670 }
9671 
9672 /* Gimplify the gross structure of an OMP_FOR statement.  */
9673 
9674 static enum gimplify_status
9675 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
9676 {
9677   tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
9678   enum gimplify_status ret = GS_ALL_DONE;
9679   enum gimplify_status tret;
9680   gomp_for *gfor;
9681   gimple_seq for_body, for_pre_body;
9682   int i;
9683   bitmap has_decl_expr = NULL;
9684   enum omp_region_type ort = ORT_WORKSHARE;
9685 
9686   orig_for_stmt = for_stmt = *expr_p;
9687 
9688   switch (TREE_CODE (for_stmt))
9689     {
9690     case OMP_FOR:
9691     case OMP_DISTRIBUTE:
9692       break;
9693     case OACC_LOOP:
9694       ort = ORT_ACC;
9695       break;
9696     case OMP_TASKLOOP:
9697       if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
9698 	ort = ORT_UNTIED_TASK;
9699       else
9700 	ort = ORT_TASK;
9701       break;
9702     case OMP_SIMD:
9703       ort = ORT_SIMD;
9704       break;
9705     default:
9706       gcc_unreachable ();
9707     }
9708 
9709   /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
9710      clause for the IV.  */
9711   if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
9712     {
9713       t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
9714       gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
9715       decl = TREE_OPERAND (t, 0);
9716       for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
9717 	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
9718 	    && OMP_CLAUSE_DECL (c) == decl)
9719 	  {
9720 	    OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
9721 	    break;
9722 	  }
9723     }
9724 
9725   if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
9726     {
9727       gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
9728       inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
9729 				  find_combined_omp_for, NULL, NULL);
9730       if (inner_for_stmt == NULL_TREE)
9731 	{
9732 	  gcc_assert (seen_error ());
9733 	  *expr_p = NULL_TREE;
9734 	  return GS_ERROR;
9735 	}
9736     }
9737 
9738   if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
9739     gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
9740 			       TREE_CODE (for_stmt));
9741 
9742   if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
9743     gimplify_omp_ctxp->distribute = true;
9744 
9745   /* Handle OMP_FOR_INIT.  */
9746   for_pre_body = NULL;
9747   if (ort == ORT_SIMD && OMP_FOR_PRE_BODY (for_stmt))
9748     {
9749       has_decl_expr = BITMAP_ALLOC (NULL);
9750       if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
9751 	  && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
9752 	     == VAR_DECL)
9753 	{
9754 	  t = OMP_FOR_PRE_BODY (for_stmt);
9755 	  bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
9756 	}
9757       else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
9758 	{
9759 	  tree_stmt_iterator si;
9760 	  for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
9761 	       tsi_next (&si))
9762 	    {
9763 	      t = tsi_stmt (si);
9764 	      if (TREE_CODE (t) == DECL_EXPR
9765 		  && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
9766 		bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
9767 	    }
9768 	}
9769     }
9770   if (OMP_FOR_PRE_BODY (for_stmt))
9771     {
9772       if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
9773 	gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
9774       else
9775 	{
9776 	  struct gimplify_omp_ctx ctx;
9777 	  memset (&ctx, 0, sizeof (ctx));
9778 	  ctx.region_type = ORT_NONE;
9779 	  gimplify_omp_ctxp = &ctx;
9780 	  gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
9781 	  gimplify_omp_ctxp = NULL;
9782 	}
9783     }
9784   OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
9785 
9786   if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
9787     for_stmt = inner_for_stmt;
9788 
9789   /* For taskloop, need to gimplify the start, end and step before the
9790      taskloop, outside of the taskloop omp context.  */
9791   if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
9792     {
9793       for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
9794 	{
9795 	  t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
9796 	  if (!is_gimple_constant (TREE_OPERAND (t, 1)))
9797 	    {
9798 	      tree type = TREE_TYPE (TREE_OPERAND (t, 0));
9799 	      TREE_OPERAND (t, 1)
9800 		= get_initialized_tmp_var (TREE_OPERAND (t, 1),
9801 					   gimple_seq_empty_p (for_pre_body)
9802 					   ? pre_p : &for_pre_body, NULL,
9803 					   false);
9804 	      /* Reference to pointer conversion is considered useless,
9805 		 but is significant for firstprivate clause.  Force it
9806 		 here.  */
9807 	      if (TREE_CODE (type) == POINTER_TYPE
9808 		  && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 1)))
9809 		      == REFERENCE_TYPE))
9810 		{
9811 		  tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
9812 		  tree m = build2 (INIT_EXPR, TREE_TYPE (v), v,
9813 				   TREE_OPERAND (t, 1));
9814 		  gimplify_and_add (m, gimple_seq_empty_p (for_pre_body)
9815 				       ? pre_p : &for_pre_body);
9816 		  TREE_OPERAND (t, 1) = v;
9817 		}
9818 	      tree c = build_omp_clause (input_location,
9819 					 OMP_CLAUSE_FIRSTPRIVATE);
9820 	      OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
9821 	      OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
9822 	      OMP_FOR_CLAUSES (orig_for_stmt) = c;
9823 	    }
9824 
9825 	  /* Handle OMP_FOR_COND.  */
9826 	  t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
9827 	  if (!is_gimple_constant (TREE_OPERAND (t, 1)))
9828 	    {
9829 	      tree type = TREE_TYPE (TREE_OPERAND (t, 0));
9830 	      TREE_OPERAND (t, 1)
9831 		= get_initialized_tmp_var (TREE_OPERAND (t, 1),
9832 					   gimple_seq_empty_p (for_pre_body)
9833 					   ? pre_p : &for_pre_body, NULL,
9834 					   false);
9835 	      /* Reference to pointer conversion is considered useless,
9836 		 but is significant for firstprivate clause.  Force it
9837 		 here.  */
9838 	      if (TREE_CODE (type) == POINTER_TYPE
9839 		  && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 1)))
9840 		      == REFERENCE_TYPE))
9841 		{
9842 		  tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
9843 		  tree m = build2 (INIT_EXPR, TREE_TYPE (v), v,
9844 				   TREE_OPERAND (t, 1));
9845 		  gimplify_and_add (m, gimple_seq_empty_p (for_pre_body)
9846 				       ? pre_p : &for_pre_body);
9847 		  TREE_OPERAND (t, 1) = v;
9848 		}
9849 	      tree c = build_omp_clause (input_location,
9850 					 OMP_CLAUSE_FIRSTPRIVATE);
9851 	      OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
9852 	      OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
9853 	      OMP_FOR_CLAUSES (orig_for_stmt) = c;
9854 	    }
9855 
9856 	  /* Handle OMP_FOR_INCR.  */
9857 	  t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
9858 	  if (TREE_CODE (t) == MODIFY_EXPR)
9859 	    {
9860 	      decl = TREE_OPERAND (t, 0);
9861 	      t = TREE_OPERAND (t, 1);
9862 	      tree *tp = &TREE_OPERAND (t, 1);
9863 	      if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
9864 		tp = &TREE_OPERAND (t, 0);
9865 
9866 	      if (!is_gimple_constant (*tp))
9867 		{
9868 		  gimple_seq *seq = gimple_seq_empty_p (for_pre_body)
9869 				    ? pre_p : &for_pre_body;
9870 		  *tp = get_initialized_tmp_var (*tp, seq, NULL, false);
9871 		  tree c = build_omp_clause (input_location,
9872 					     OMP_CLAUSE_FIRSTPRIVATE);
9873 		  OMP_CLAUSE_DECL (c) = *tp;
9874 		  OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
9875 		  OMP_FOR_CLAUSES (orig_for_stmt) = c;
9876 		}
9877 	    }
9878 	}
9879 
9880       gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
9881 				 OMP_TASKLOOP);
9882     }
9883 
9884   if (orig_for_stmt != for_stmt)
9885     gimplify_omp_ctxp->combined_loop = true;
9886 
9887   for_body = NULL;
9888   gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
9889 	      == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
9890   gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
9891 	      == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
9892 
9893   tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
9894   bool is_doacross = false;
9895   if (c && OMP_CLAUSE_ORDERED_EXPR (c))
9896     {
9897       is_doacross = true;
9898       gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH
9899 						 (OMP_FOR_INIT (for_stmt))
9900 					       * 2);
9901     }
9902   int collapse = 1, tile = 0;
9903   c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
9904   if (c)
9905     collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
9906   c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
9907   if (c)
9908     tile = list_length (OMP_CLAUSE_TILE_LIST (c));
9909   for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
9910     {
9911       t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
9912       gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
9913       decl = TREE_OPERAND (t, 0);
9914       gcc_assert (DECL_P (decl));
9915       gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
9916 		  || POINTER_TYPE_P (TREE_TYPE (decl)));
9917       if (is_doacross)
9918 	{
9919 	  if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
9920 	    gimplify_omp_ctxp->loop_iter_var.quick_push
9921 	      (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i));
9922 	  else
9923 	    gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
9924 	  gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
9925 	}
9926 
9927       /* Make sure the iteration variable is private.  */
9928       tree c = NULL_TREE;
9929       tree c2 = NULL_TREE;
9930       if (orig_for_stmt != for_stmt)
9931 	/* Do this only on innermost construct for combined ones.  */;
9932       else if (ort == ORT_SIMD)
9933 	{
9934 	  splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
9935 						 (splay_tree_key) decl);
9936 	  omp_is_private (gimplify_omp_ctxp, decl,
9937 			  1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
9938 			       != 1));
9939 	  if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
9940 	    omp_notice_variable (gimplify_omp_ctxp, decl, true);
9941 	  else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
9942 	    {
9943 	      c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
9944 	      OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
9945 	      unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
9946 	      if (has_decl_expr
9947 		  && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
9948 		{
9949 		  OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
9950 		  flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9951 		}
9952 	      struct gimplify_omp_ctx *outer
9953 		= gimplify_omp_ctxp->outer_context;
9954 	      if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
9955 		{
9956 		  if (outer->region_type == ORT_WORKSHARE
9957 		      && outer->combined_loop)
9958 		    {
9959 		      n = splay_tree_lookup (outer->variables,
9960 					     (splay_tree_key)decl);
9961 		      if (n != NULL && (n->value & GOVD_LOCAL) != 0)
9962 			{
9963 			  OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
9964 			  flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9965 			}
9966 		      else
9967 			{
9968 			  struct gimplify_omp_ctx *octx = outer->outer_context;
9969 			  if (octx
9970 			      && octx->region_type == ORT_COMBINED_PARALLEL
9971 			      && octx->outer_context
9972 			      && (octx->outer_context->region_type
9973 				  == ORT_WORKSHARE)
9974 			      && octx->outer_context->combined_loop)
9975 			    {
9976 			      octx = octx->outer_context;
9977 			      n = splay_tree_lookup (octx->variables,
9978 						     (splay_tree_key)decl);
9979 			      if (n != NULL && (n->value & GOVD_LOCAL) != 0)
9980 				{
9981 				  OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
9982 				  flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9983 				}
9984 			    }
9985 			}
9986 		    }
9987 		}
9988 
9989 	      OMP_CLAUSE_DECL (c) = decl;
9990 	      OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
9991 	      OMP_FOR_CLAUSES (for_stmt) = c;
9992 	      omp_add_variable (gimplify_omp_ctxp, decl, flags);
9993 	      if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
9994 		{
9995 		  if (outer->region_type == ORT_WORKSHARE
9996 		      && outer->combined_loop)
9997 		    {
9998 		      if (outer->outer_context
9999 			  && (outer->outer_context->region_type
10000 			      == ORT_COMBINED_PARALLEL))
10001 			outer = outer->outer_context;
10002 		      else if (omp_check_private (outer, decl, false))
10003 			outer = NULL;
10004 		    }
10005 		  else if (((outer->region_type & ORT_TASK) != 0)
10006 			   && outer->combined_loop
10007 			   && !omp_check_private (gimplify_omp_ctxp,
10008 						  decl, false))
10009 		    ;
10010 		  else if (outer->region_type != ORT_COMBINED_PARALLEL)
10011 		    {
10012 		      omp_notice_variable (outer, decl, true);
10013 		      outer = NULL;
10014 		    }
10015 		  if (outer)
10016 		    {
10017 		      n = splay_tree_lookup (outer->variables,
10018 					     (splay_tree_key)decl);
10019 		      if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
10020 			{
10021 			  omp_add_variable (outer, decl,
10022 					    GOVD_LASTPRIVATE | GOVD_SEEN);
10023 			  if (outer->region_type == ORT_COMBINED_PARALLEL
10024 			      && outer->outer_context
10025 			      && (outer->outer_context->region_type
10026 				  == ORT_WORKSHARE)
10027 			      && outer->outer_context->combined_loop)
10028 			    {
10029 			      outer = outer->outer_context;
10030 			      n = splay_tree_lookup (outer->variables,
10031 						     (splay_tree_key)decl);
10032 			      if (omp_check_private (outer, decl, false))
10033 				outer = NULL;
10034 			      else if (n == NULL
10035 				       || ((n->value & GOVD_DATA_SHARE_CLASS)
10036 					   == 0))
10037 				omp_add_variable (outer, decl,
10038 						  GOVD_LASTPRIVATE
10039 						  | GOVD_SEEN);
10040 			      else
10041 				outer = NULL;
10042 			    }
10043 			  if (outer && outer->outer_context
10044 			      && (outer->outer_context->region_type
10045 				  == ORT_COMBINED_TEAMS))
10046 			    {
10047 			      outer = outer->outer_context;
10048 			      n = splay_tree_lookup (outer->variables,
10049 						     (splay_tree_key)decl);
10050 			      if (n == NULL
10051 				  || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
10052 				omp_add_variable (outer, decl,
10053 						  GOVD_SHARED | GOVD_SEEN);
10054 			      else
10055 				outer = NULL;
10056 			    }
10057 			  if (outer && outer->outer_context)
10058 			    omp_notice_variable (outer->outer_context, decl,
10059 						 true);
10060 			}
10061 		    }
10062 		}
10063 	    }
10064 	  else
10065 	    {
10066 	      bool lastprivate
10067 		= (!has_decl_expr
10068 		   || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
10069 	      struct gimplify_omp_ctx *outer
10070 		= gimplify_omp_ctxp->outer_context;
10071 	      if (outer && lastprivate)
10072 		{
10073 		  if (outer->region_type == ORT_WORKSHARE
10074 		      && outer->combined_loop)
10075 		    {
10076 		      n = splay_tree_lookup (outer->variables,
10077 					     (splay_tree_key)decl);
10078 		      if (n != NULL && (n->value & GOVD_LOCAL) != 0)
10079 			{
10080 			  lastprivate = false;
10081 			  outer = NULL;
10082 			}
10083 		      else if (outer->outer_context
10084 			       && (outer->outer_context->region_type
10085 				   == ORT_COMBINED_PARALLEL))
10086 			outer = outer->outer_context;
10087 		      else if (omp_check_private (outer, decl, false))
10088 			outer = NULL;
10089 		    }
10090 		  else if (((outer->region_type & ORT_TASK) != 0)
10091 			   && outer->combined_loop
10092 			   && !omp_check_private (gimplify_omp_ctxp,
10093 						  decl, false))
10094 		    ;
10095 		  else if (outer->region_type != ORT_COMBINED_PARALLEL)
10096 		    {
10097 		      omp_notice_variable (outer, decl, true);
10098 		      outer = NULL;
10099 		    }
10100 		  if (outer)
10101 		    {
10102 		      n = splay_tree_lookup (outer->variables,
10103 					     (splay_tree_key)decl);
10104 		      if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
10105 			{
10106 			  omp_add_variable (outer, decl,
10107 					    GOVD_LASTPRIVATE | GOVD_SEEN);
10108 			  if (outer->region_type == ORT_COMBINED_PARALLEL
10109 			      && outer->outer_context
10110 			      && (outer->outer_context->region_type
10111 				  == ORT_WORKSHARE)
10112 			      && outer->outer_context->combined_loop)
10113 			    {
10114 			      outer = outer->outer_context;
10115 			      n = splay_tree_lookup (outer->variables,
10116 						     (splay_tree_key)decl);
10117 			      if (omp_check_private (outer, decl, false))
10118 				outer = NULL;
10119 			      else if (n == NULL
10120 				       || ((n->value & GOVD_DATA_SHARE_CLASS)
10121 					   == 0))
10122 				omp_add_variable (outer, decl,
10123 						  GOVD_LASTPRIVATE
10124 						  | GOVD_SEEN);
10125 			      else
10126 				outer = NULL;
10127 			    }
10128 			  if (outer && outer->outer_context
10129 			      && (outer->outer_context->region_type
10130 				  == ORT_COMBINED_TEAMS))
10131 			    {
10132 			      outer = outer->outer_context;
10133 			      n = splay_tree_lookup (outer->variables,
10134 						     (splay_tree_key)decl);
10135 			      if (n == NULL
10136 				  || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
10137 				omp_add_variable (outer, decl,
10138 						  GOVD_SHARED | GOVD_SEEN);
10139 			      else
10140 				outer = NULL;
10141 			    }
10142 			  if (outer && outer->outer_context)
10143 			    omp_notice_variable (outer->outer_context, decl,
10144 						 true);
10145 			}
10146 		    }
10147 		}
10148 
10149 	      c = build_omp_clause (input_location,
10150 				    lastprivate ? OMP_CLAUSE_LASTPRIVATE
10151 						: OMP_CLAUSE_PRIVATE);
10152 	      OMP_CLAUSE_DECL (c) = decl;
10153 	      OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
10154 	      OMP_FOR_CLAUSES (for_stmt) = c;
10155 	      omp_add_variable (gimplify_omp_ctxp, decl,
10156 				(lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
10157 				| GOVD_EXPLICIT | GOVD_SEEN);
10158 	      c = NULL_TREE;
10159 	    }
10160 	}
10161       else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
10162 	omp_notice_variable (gimplify_omp_ctxp, decl, true);
10163       else
10164 	omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
10165 
10166       /* If DECL is not a gimple register, create a temporary variable to act
10167 	 as an iteration counter.  This is valid, since DECL cannot be
10168 	 modified in the body of the loop.  Similarly for any iteration vars
10169 	 in simd with collapse > 1 where the iterator vars must be
10170 	 lastprivate.  */
10171       if (orig_for_stmt != for_stmt)
10172 	var = decl;
10173       else if (!is_gimple_reg (decl)
10174 	       || (ort == ORT_SIMD
10175 		   && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
10176 	{
10177 	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10178 	  /* Make sure omp_add_variable is not called on it prematurely.
10179 	     We call it ourselves a few lines later.  */
10180 	  gimplify_omp_ctxp = NULL;
10181 	  var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
10182 	  gimplify_omp_ctxp = ctx;
10183 	  TREE_OPERAND (t, 0) = var;
10184 
10185 	  gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
10186 
10187 	  if (ort == ORT_SIMD
10188 	      && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
10189 	    {
10190 	      c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
10191 	      OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
10192 	      OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
10193 	      OMP_CLAUSE_DECL (c2) = var;
10194 	      OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
10195 	      OMP_FOR_CLAUSES (for_stmt) = c2;
10196 	      omp_add_variable (gimplify_omp_ctxp, var,
10197 				GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
10198 	      if (c == NULL_TREE)
10199 		{
10200 		  c = c2;
10201 		  c2 = NULL_TREE;
10202 		}
10203 	    }
10204 	  else
10205 	    omp_add_variable (gimplify_omp_ctxp, var,
10206 			      GOVD_PRIVATE | GOVD_SEEN);
10207 	}
10208       else
10209 	var = decl;
10210 
10211       tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
10212 			    is_gimple_val, fb_rvalue, false);
10213       ret = MIN (ret, tret);
10214       if (ret == GS_ERROR)
10215 	return ret;
10216 
10217       /* Handle OMP_FOR_COND.  */
10218       t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
10219       gcc_assert (COMPARISON_CLASS_P (t));
10220       gcc_assert (TREE_OPERAND (t, 0) == decl);
10221 
10222       tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
10223 			    is_gimple_val, fb_rvalue, false);
10224       ret = MIN (ret, tret);
10225 
10226       /* Handle OMP_FOR_INCR.  */
10227       t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10228       switch (TREE_CODE (t))
10229 	{
10230 	case PREINCREMENT_EXPR:
10231 	case POSTINCREMENT_EXPR:
10232 	  {
10233 	    tree decl = TREE_OPERAND (t, 0);
10234 	    /* c_omp_for_incr_canonicalize_ptr() should have been
10235 	       called to massage things appropriately.  */
10236 	    gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
10237 
10238 	    if (orig_for_stmt != for_stmt)
10239 	      break;
10240 	    t = build_int_cst (TREE_TYPE (decl), 1);
10241 	    if (c)
10242 	      OMP_CLAUSE_LINEAR_STEP (c) = t;
10243 	    t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
10244 	    t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
10245 	    TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
10246 	    break;
10247 	  }
10248 
10249 	case PREDECREMENT_EXPR:
10250 	case POSTDECREMENT_EXPR:
10251 	  /* c_omp_for_incr_canonicalize_ptr() should have been
10252 	     called to massage things appropriately.  */
10253 	  gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
10254 	  if (orig_for_stmt != for_stmt)
10255 	    break;
10256 	  t = build_int_cst (TREE_TYPE (decl), -1);
10257 	  if (c)
10258 	    OMP_CLAUSE_LINEAR_STEP (c) = t;
10259 	  t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
10260 	  t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
10261 	  TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
10262 	  break;
10263 
10264 	case MODIFY_EXPR:
10265 	  gcc_assert (TREE_OPERAND (t, 0) == decl);
10266 	  TREE_OPERAND (t, 0) = var;
10267 
10268 	  t = TREE_OPERAND (t, 1);
10269 	  switch (TREE_CODE (t))
10270 	    {
10271 	    case PLUS_EXPR:
10272 	      if (TREE_OPERAND (t, 1) == decl)
10273 		{
10274 		  TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
10275 		  TREE_OPERAND (t, 0) = var;
10276 		  break;
10277 		}
10278 
10279 	      /* Fallthru.  */
10280 	    case MINUS_EXPR:
10281 	    case POINTER_PLUS_EXPR:
10282 	      gcc_assert (TREE_OPERAND (t, 0) == decl);
10283 	      TREE_OPERAND (t, 0) = var;
10284 	      break;
10285 	    default:
10286 	      gcc_unreachable ();
10287 	    }
10288 
10289 	  tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
10290 				is_gimple_val, fb_rvalue, false);
10291 	  ret = MIN (ret, tret);
10292 	  if (c)
10293 	    {
10294 	      tree step = TREE_OPERAND (t, 1);
10295 	      tree stept = TREE_TYPE (decl);
10296 	      if (POINTER_TYPE_P (stept))
10297 		stept = sizetype;
10298 	      step = fold_convert (stept, step);
10299 	      if (TREE_CODE (t) == MINUS_EXPR)
10300 		step = fold_build1 (NEGATE_EXPR, stept, step);
10301 	      OMP_CLAUSE_LINEAR_STEP (c) = step;
10302 	      if (step != TREE_OPERAND (t, 1))
10303 		{
10304 		  tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
10305 					&for_pre_body, NULL,
10306 					is_gimple_val, fb_rvalue, false);
10307 		  ret = MIN (ret, tret);
10308 		}
10309 	    }
10310 	  break;
10311 
10312 	default:
10313 	  gcc_unreachable ();
10314 	}
10315 
10316       if (c2)
10317 	{
10318 	  gcc_assert (c);
10319 	  OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
10320 	}
10321 
10322       if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
10323 	{
10324 	  for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
10325 	    if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
10326 		  && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
10327 		 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10328 		     && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
10329 		     && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
10330 		&& OMP_CLAUSE_DECL (c) == decl)
10331 	      {
10332 		if (is_doacross && (collapse == 1 || i >= collapse))
10333 		  t = var;
10334 		else
10335 		  {
10336 		    t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10337 		    gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
10338 		    gcc_assert (TREE_OPERAND (t, 0) == var);
10339 		    t = TREE_OPERAND (t, 1);
10340 		    gcc_assert (TREE_CODE (t) == PLUS_EXPR
10341 				|| TREE_CODE (t) == MINUS_EXPR
10342 				|| TREE_CODE (t) == POINTER_PLUS_EXPR);
10343 		    gcc_assert (TREE_OPERAND (t, 0) == var);
10344 		    t = build2 (TREE_CODE (t), TREE_TYPE (decl),
10345 				is_doacross ? var : decl,
10346 				TREE_OPERAND (t, 1));
10347 		  }
10348 		gimple_seq *seq;
10349 		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
10350 		  seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
10351 		else
10352 		  seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
10353 		push_gimplify_context ();
10354 		gimplify_assign (decl, t, seq);
10355 		gimple *bind = NULL;
10356 		if (gimplify_ctxp->temps)
10357 		  {
10358 		    bind = gimple_build_bind (NULL_TREE, *seq, NULL_TREE);
10359 		    *seq = NULL;
10360 		    gimplify_seq_add_stmt (seq, bind);
10361 		  }
10362 		pop_gimplify_context (bind);
10363 	      }
10364 	}
10365     }
10366 
10367   BITMAP_FREE (has_decl_expr);
10368 
10369   if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10370     {
10371       push_gimplify_context ();
10372       if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
10373 	{
10374 	  OMP_FOR_BODY (orig_for_stmt)
10375 	    = build3 (BIND_EXPR, void_type_node, NULL,
10376 		      OMP_FOR_BODY (orig_for_stmt), NULL);
10377 	  TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
10378 	}
10379     }
10380 
10381   gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
10382 					 &for_body);
10383 
10384   if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10385     {
10386       if (gimple_code (g) == GIMPLE_BIND)
10387 	pop_gimplify_context (g);
10388       else
10389 	pop_gimplify_context (NULL);
10390     }
10391 
10392   if (orig_for_stmt != for_stmt)
10393     for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
10394       {
10395 	t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
10396 	decl = TREE_OPERAND (t, 0);
10397 	struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10398 	if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10399 	  gimplify_omp_ctxp = ctx->outer_context;
10400 	var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
10401 	gimplify_omp_ctxp = ctx;
10402 	omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
10403 	TREE_OPERAND (t, 0) = var;
10404 	t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10405 	TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
10406 	TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
10407       }
10408 
10409   gimplify_adjust_omp_clauses (pre_p, for_body,
10410 			       &OMP_FOR_CLAUSES (orig_for_stmt),
10411 			       TREE_CODE (orig_for_stmt));
10412 
10413   int kind;
10414   switch (TREE_CODE (orig_for_stmt))
10415     {
10416     case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
10417     case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
10418     case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
10419     case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
10420     case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
10421     default:
10422       gcc_unreachable ();
10423     }
10424   gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
10425 			       TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
10426 			       for_pre_body);
10427   if (orig_for_stmt != for_stmt)
10428     gimple_omp_for_set_combined_p (gfor, true);
10429   if (gimplify_omp_ctxp
10430       && (gimplify_omp_ctxp->combined_loop
10431 	  || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
10432 	      && gimplify_omp_ctxp->outer_context
10433 	      && gimplify_omp_ctxp->outer_context->combined_loop)))
10434     {
10435       gimple_omp_for_set_combined_into_p (gfor, true);
10436       if (gimplify_omp_ctxp->combined_loop)
10437 	gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
10438       else
10439 	gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
10440     }
10441 
10442   for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
10443     {
10444       t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
10445       gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
10446       gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
10447       t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
10448       gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
10449       gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
10450       t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10451       gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
10452     }
10453 
10454   /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
10455      constructs with GIMPLE_OMP_TASK sandwiched in between them.
10456      The outer taskloop stands for computing the number of iterations,
10457      counts for collapsed loops and holding taskloop specific clauses.
10458      The task construct stands for the effect of data sharing on the
10459      explicit task it creates and the inner taskloop stands for expansion
10460      of the static loop inside of the explicit task construct.  */
10461   if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10462     {
10463       tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
10464       tree task_clauses = NULL_TREE;
10465       tree c = *gfor_clauses_ptr;
10466       tree *gtask_clauses_ptr = &task_clauses;
10467       tree outer_for_clauses = NULL_TREE;
10468       tree *gforo_clauses_ptr = &outer_for_clauses;
10469       for (; c; c = OMP_CLAUSE_CHAIN (c))
10470 	switch (OMP_CLAUSE_CODE (c))
10471 	  {
10472 	  /* These clauses are allowed on task, move them there.  */
10473 	  case OMP_CLAUSE_SHARED:
10474 	  case OMP_CLAUSE_FIRSTPRIVATE:
10475 	  case OMP_CLAUSE_DEFAULT:
10476 	  case OMP_CLAUSE_IF:
10477 	  case OMP_CLAUSE_UNTIED:
10478 	  case OMP_CLAUSE_FINAL:
10479 	  case OMP_CLAUSE_MERGEABLE:
10480 	  case OMP_CLAUSE_PRIORITY:
10481 	    *gtask_clauses_ptr = c;
10482 	    gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10483 	    break;
10484 	  case OMP_CLAUSE_PRIVATE:
10485 	    if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
10486 	      {
10487 		/* We want private on outer for and firstprivate
10488 		   on task.  */
10489 		*gtask_clauses_ptr
10490 		  = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10491 				      OMP_CLAUSE_FIRSTPRIVATE);
10492 		OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
10493 		lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
10494 		gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
10495 		*gforo_clauses_ptr = c;
10496 		gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10497 	      }
10498 	    else
10499 	      {
10500 		*gtask_clauses_ptr = c;
10501 		gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10502 	      }
10503 	    break;
10504 	  /* These clauses go into outer taskloop clauses.  */
10505 	  case OMP_CLAUSE_GRAINSIZE:
10506 	  case OMP_CLAUSE_NUM_TASKS:
10507 	  case OMP_CLAUSE_NOGROUP:
10508 	    *gforo_clauses_ptr = c;
10509 	    gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10510 	    break;
10511 	  /* Taskloop clause we duplicate on both taskloops.  */
10512 	  case OMP_CLAUSE_COLLAPSE:
10513 	    *gfor_clauses_ptr = c;
10514 	    gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10515 	    *gforo_clauses_ptr = copy_node (c);
10516 	    gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
10517 	    break;
10518 	  /* For lastprivate, keep the clause on inner taskloop, and add
10519 	     a shared clause on task.  If the same decl is also firstprivate,
10520 	     add also firstprivate clause on the inner taskloop.  */
10521 	  case OMP_CLAUSE_LASTPRIVATE:
10522 	    if (OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
10523 	      {
10524 		/* For taskloop C++ lastprivate IVs, we want:
10525 		   1) private on outer taskloop
10526 		   2) firstprivate and shared on task
10527 		   3) lastprivate on inner taskloop  */
10528 		*gtask_clauses_ptr
10529 		  = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10530 				      OMP_CLAUSE_FIRSTPRIVATE);
10531 		OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
10532 		lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
10533 		gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
10534 		OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
10535 		*gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10536 						       OMP_CLAUSE_PRIVATE);
10537 		OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
10538 		OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
10539 		TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
10540 		gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
10541 	      }
10542 	    *gfor_clauses_ptr = c;
10543 	    gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10544 	    *gtask_clauses_ptr
10545 	      = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
10546 	    OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
10547 	    if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
10548 	      OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
10549 	    gtask_clauses_ptr
10550 	      = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
10551 	    break;
10552 	  default:
10553 	    gcc_unreachable ();
10554 	  }
10555       *gfor_clauses_ptr = NULL_TREE;
10556       *gtask_clauses_ptr = NULL_TREE;
10557       *gforo_clauses_ptr = NULL_TREE;
10558       g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
10559       g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
10560 				 NULL_TREE, NULL_TREE, NULL_TREE);
10561       gimple_omp_task_set_taskloop_p (g, true);
10562       g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
10563       gomp_for *gforo
10564 	= gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
10565 				gimple_omp_for_collapse (gfor),
10566 				gimple_omp_for_pre_body (gfor));
10567       gimple_omp_for_set_pre_body (gfor, NULL);
10568       gimple_omp_for_set_combined_p (gforo, true);
10569       gimple_omp_for_set_combined_into_p (gfor, true);
10570       for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
10571 	{
10572 	  tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
10573 	  tree v = create_tmp_var (type);
10574 	  gimple_omp_for_set_index (gforo, i, v);
10575 	  t = unshare_expr (gimple_omp_for_initial (gfor, i));
10576 	  gimple_omp_for_set_initial (gforo, i, t);
10577 	  gimple_omp_for_set_cond (gforo, i,
10578 				   gimple_omp_for_cond (gfor, i));
10579 	  t = unshare_expr (gimple_omp_for_final (gfor, i));
10580 	  gimple_omp_for_set_final (gforo, i, t);
10581 	  t = unshare_expr (gimple_omp_for_incr (gfor, i));
10582 	  gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
10583 	  TREE_OPERAND (t, 0) = v;
10584 	  gimple_omp_for_set_incr (gforo, i, t);
10585 	  t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
10586 	  OMP_CLAUSE_DECL (t) = v;
10587 	  OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
10588 	  gimple_omp_for_set_clauses (gforo, t);
10589 	}
10590       gimplify_seq_add_stmt (pre_p, gforo);
10591     }
10592   else
10593     gimplify_seq_add_stmt (pre_p, gfor);
10594   if (ret != GS_ALL_DONE)
10595     return GS_ERROR;
10596   *expr_p = NULL_TREE;
10597   return GS_ALL_DONE;
10598 }
10599 
10600 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
10601    of OMP_TARGET's body.  */
10602 
10603 static tree
10604 find_omp_teams (tree *tp, int *walk_subtrees, void *)
10605 {
10606   *walk_subtrees = 0;
10607   switch (TREE_CODE (*tp))
10608     {
10609     case OMP_TEAMS:
10610       return *tp;
10611     case BIND_EXPR:
10612     case STATEMENT_LIST:
10613       *walk_subtrees = 1;
10614       break;
10615     default:
10616       break;
10617     }
10618   return NULL_TREE;
10619 }
10620 
10621 /* Helper function of optimize_target_teams, determine if the expression
10622    can be computed safely before the target construct on the host.  */
10623 
static tree
computable_teams_clause (tree *tp, int *walk_subtrees, void *)
{
  splay_tree_node n;

  /* Types have no runtime value of their own; nothing below them can
     make the expression uncomputable.  */
  if (TYPE_P (*tp))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }
  /* walk_tree callback protocol: returning *tp means "not computable
     before the target construct" and aborts the walk; returning
     NULL_TREE means this subtree is fine.  */
  switch (TREE_CODE (*tp))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      *walk_subtrees = 0;
      /* Reject decls whose host-side value is unreliable or expensive:
	 erroneous, non-integral, value-expr backed, thread-local,
	 side-effecting or volatile.  */
      if (error_operand_p (*tp)
	  || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
	  || DECL_HAS_VALUE_EXPR_P (*tp)
	  || DECL_THREAD_LOCAL_P (*tp)
	  || TREE_SIDE_EFFECTS (*tp)
	  || TREE_THIS_VOLATILE (*tp))
	return *tp;
      /* Globals mapped to the device ("omp declare target"/"... link")
	 may have a different value on the device; don't precompute.  */
      if (is_global_var (*tp)
	  && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
	      || lookup_attribute ("omp declare target link",
				   DECL_ATTRIBUTES (*tp))))
	return *tp;
      /* A local VAR_DECL not yet seen in any BIND_EXPR is declared
	 inside the target body itself, so it has no host value.  */
      if (VAR_P (*tp)
	  && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
	  && !is_global_var (*tp)
	  && decl_function_context (*tp) == current_function_decl)
	return *tp;
      /* Consult the target context's data-sharing table for the decl.  */
      n = splay_tree_lookup (gimplify_omp_ctxp->variables,
			     (splay_tree_key) *tp);
      if (n == NULL)
	{
	  /* Not recorded yet: OK only if unreferenced scalars default
	     to firstprivate on this target region.  */
	  if (gimplify_omp_ctxp->target_map_scalars_firstprivate)
	    return NULL_TREE;
	  return *tp;
	}
      else if (n->value & GOVD_LOCAL)
	return *tp;
      else if (n->value & GOVD_FIRSTPRIVATE)
	/* Firstprivate: host value is copied in, safe to use.  */
	return NULL_TREE;
      else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
	       == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
	/* map(always,to:)/map(always,tofrom:) also guarantees the host
	   value reaches the device.  */
	return NULL_TREE;
      return *tp;
    case INTEGER_CST:
      /* Integral constants are trivially computable.  */
      if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
	return *tp;
      return NULL_TREE;
    case TARGET_EXPR:
      /* Only a bare TARGET_EXPR slot (no initializer) is acceptable;
	 check the slot variable itself recursively.  */
      if (TARGET_EXPR_INITIAL (*tp)
	  || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
	return *tp;
      return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
				      walk_subtrees, NULL);
    /* Allow some reasonable subset of integral arithmetics.  */
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case NON_LVALUE_EXPR:
    CASE_CONVERT:
      if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
	return *tp;
      return NULL_TREE;
    /* And disallow anything else, except for comparisons.  */
    default:
      if (COMPARISON_CLASS_P (*tp))
	return NULL_TREE;
      return *tp;
    }
}
10719 
10720 /* Try to determine if the num_teams and/or thread_limit expressions
10721    can have their values determined already before entering the
10722    target construct.
10723    INTEGER_CSTs trivially are,
10724    integral decls that are firstprivate (explicitly or implicitly)
10725    or explicitly map(always, to:) or map(always, tofrom:) on the target
10726    region too, and expressions involving simple arithmetics on those
10727    too, function calls are not ok, dereferencing something neither etc.
10728    Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
10729    EXPR based on what we find:
10730    0 stands for clause not specified at all, use implementation default
10731    -1 stands for value that can't be determined easily before entering
10732       the target construct.
10733    If teams construct is not present at all, use 1 for num_teams
10734    and 0 for thread_limit (only one team is involved, and the thread
   limit is implementation defined).  */
10736 
static void
optimize_target_teams (tree target, gimple_seq *pre_p)
{
  tree body = OMP_BODY (target);
  /* Locate the OMP_TEAMS construct (if any) directly inside the
     target body, looking through BIND_EXPRs and STATEMENT_LISTs.  */
  tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
  /* 0 = clause not specified, use implementation default.  */
  tree num_teams = integer_zero_node;
  tree thread_limit = integer_zero_node;
  location_t num_teams_loc = EXPR_LOCATION (target);
  location_t thread_limit_loc = EXPR_LOCATION (target);
  tree c, *p, expr;
  struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;

  if (teams == NULL_TREE)
    /* No teams construct: exactly one team runs; thread limit stays
       implementation defined (0).  */
    num_teams = integer_one_node;
  else
    for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
      {
	/* Pick the slot (and location) for the clause kinds we care
	   about; skip everything else.  */
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
	  {
	    p = &num_teams;
	    num_teams_loc = OMP_CLAUSE_LOCATION (c);
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
	  {
	    p = &thread_limit;
	    thread_limit_loc = OMP_CLAUSE_LOCATION (c);
	  }
	else
	  continue;
	expr = OMP_CLAUSE_OPERAND (c, 0);
	/* Constants can be used as-is.  */
	if (TREE_CODE (expr) == INTEGER_CST)
	  {
	    *p = expr;
	    continue;
	  }
	/* -1 = value can't be determined before entering the target
	   construct.  */
	if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
	  {
	    *p = integer_minus_one_node;
	    continue;
	  }
	*p = expr;
	/* Gimplify the expression in the context enclosing the target
	   region, since it will be evaluated on the host.  */
	gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
	if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
	    == GS_ERROR)
	  {
	    gimplify_omp_ctxp = target_ctx;
	    *p = integer_minus_one_node;
	    continue;
	  }
	gimplify_omp_ctxp = target_ctx;
	/* Replace the teams clause operand with the gimplified value
	   (except for decls/TARGET_EXPRs, which stay as they are).  */
	if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
	  OMP_CLAUSE_OPERAND (c, 0) = *p;
      }
  /* Attach the computed values to the OMP_TARGET itself; thread_limit
     is chained first so num_teams ends up at the head of the list.  */
  c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
  OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
  OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
  OMP_TARGET_CLAUSES (target) = c;
  c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
  OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
  OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
  OMP_TARGET_CLAUSES (target) = c;
}
10799 
10800 /* Gimplify the gross structure of several OMP constructs.  */
10801 
10802 static void
10803 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
10804 {
10805   tree expr = *expr_p;
10806   gimple *stmt;
10807   gimple_seq body = NULL;
10808   enum omp_region_type ort;
10809 
10810   switch (TREE_CODE (expr))
10811     {
10812     case OMP_SECTIONS:
10813     case OMP_SINGLE:
10814       ort = ORT_WORKSHARE;
10815       break;
10816     case OMP_TARGET:
10817       ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
10818       break;
10819     case OACC_KERNELS:
10820       ort = ORT_ACC_KERNELS;
10821       break;
10822     case OACC_PARALLEL:
10823       ort = ORT_ACC_PARALLEL;
10824       break;
10825     case OACC_DATA:
10826       ort = ORT_ACC_DATA;
10827       break;
10828     case OMP_TARGET_DATA:
10829       ort = ORT_TARGET_DATA;
10830       break;
10831     case OMP_TEAMS:
10832       ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
10833       break;
10834     case OACC_HOST_DATA:
10835       ort = ORT_ACC_HOST_DATA;
10836       break;
10837     default:
10838       gcc_unreachable ();
10839     }
10840   gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
10841 			     TREE_CODE (expr));
10842   if (TREE_CODE (expr) == OMP_TARGET)
10843     optimize_target_teams (expr, pre_p);
10844   if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0)
10845     {
10846       push_gimplify_context ();
10847       gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
10848       if (gimple_code (g) == GIMPLE_BIND)
10849 	pop_gimplify_context (g);
10850       else
10851 	pop_gimplify_context (NULL);
10852       if ((ort & ORT_TARGET_DATA) != 0)
10853 	{
10854 	  enum built_in_function end_ix;
10855 	  switch (TREE_CODE (expr))
10856 	    {
10857 	    case OACC_DATA:
10858 	    case OACC_HOST_DATA:
10859 	      end_ix = BUILT_IN_GOACC_DATA_END;
10860 	      break;
10861 	    case OMP_TARGET_DATA:
10862 	      end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
10863 	      break;
10864 	    default:
10865 	      gcc_unreachable ();
10866 	    }
10867 	  tree fn = builtin_decl_explicit (end_ix);
10868 	  g = gimple_build_call (fn, 0);
10869 	  gimple_seq cleanup = NULL;
10870 	  gimple_seq_add_stmt (&cleanup, g);
10871 	  g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
10872 	  body = NULL;
10873 	  gimple_seq_add_stmt (&body, g);
10874 	}
10875     }
10876   else
10877     gimplify_and_add (OMP_BODY (expr), &body);
10878   gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
10879 			       TREE_CODE (expr));
10880 
10881   switch (TREE_CODE (expr))
10882     {
10883     case OACC_DATA:
10884       stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
10885 				      OMP_CLAUSES (expr));
10886       break;
10887     case OACC_KERNELS:
10888       stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
10889 				      OMP_CLAUSES (expr));
10890       break;
10891     case OACC_HOST_DATA:
10892       stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
10893 				      OMP_CLAUSES (expr));
10894       break;
10895     case OACC_PARALLEL:
10896       stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
10897 				      OMP_CLAUSES (expr));
10898       break;
10899     case OMP_SECTIONS:
10900       stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
10901       break;
10902     case OMP_SINGLE:
10903       stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
10904       break;
10905     case OMP_TARGET:
10906       stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
10907 				      OMP_CLAUSES (expr));
10908       break;
10909     case OMP_TARGET_DATA:
10910       stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
10911 				      OMP_CLAUSES (expr));
10912       break;
10913     case OMP_TEAMS:
10914       stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
10915       break;
10916     default:
10917       gcc_unreachable ();
10918     }
10919 
10920   gimplify_seq_add_stmt (pre_p, stmt);
10921   *expr_p = NULL_TREE;
10922 }
10923 
10924 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
10925    target update constructs.  */
10926 
10927 static void
10928 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
10929 {
10930   tree expr = *expr_p;
10931   int kind;
10932   gomp_target *stmt;
10933   enum omp_region_type ort = ORT_WORKSHARE;
10934 
10935   switch (TREE_CODE (expr))
10936     {
10937     case OACC_ENTER_DATA:
10938     case OACC_EXIT_DATA:
10939       kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
10940       ort = ORT_ACC;
10941       break;
10942     case OACC_UPDATE:
10943       kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
10944       ort = ORT_ACC;
10945       break;
10946     case OMP_TARGET_UPDATE:
10947       kind = GF_OMP_TARGET_KIND_UPDATE;
10948       break;
10949     case OMP_TARGET_ENTER_DATA:
10950       kind = GF_OMP_TARGET_KIND_ENTER_DATA;
10951       break;
10952     case OMP_TARGET_EXIT_DATA:
10953       kind = GF_OMP_TARGET_KIND_EXIT_DATA;
10954       break;
10955     default:
10956       gcc_unreachable ();
10957     }
10958   gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
10959 			     ort, TREE_CODE (expr));
10960   gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
10961 			       TREE_CODE (expr));
10962   stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
10963 
10964   gimplify_seq_add_stmt (pre_p, stmt);
10965   *expr_p = NULL_TREE;
10966 }
10967 
10968 /* A subroutine of gimplify_omp_atomic.  The front end is supposed to have
10969    stabilized the lhs of the atomic operation as *ADDR.  Return true if
10970    EXPR is this stabilized form.  */
10971 
10972 static bool
10973 goa_lhs_expr_p (tree expr, tree addr)
10974 {
10975   /* Also include casts to other type variants.  The C front end is fond
10976      of adding these for e.g. volatile variables.  This is like
10977      STRIP_TYPE_NOPS but includes the main variant lookup.  */
10978   STRIP_USELESS_TYPE_CONVERSION (expr);
10979 
10980   if (TREE_CODE (expr) == INDIRECT_REF)
10981     {
10982       expr = TREE_OPERAND (expr, 0);
10983       while (expr != addr
10984 	     && (CONVERT_EXPR_P (expr)
10985 		 || TREE_CODE (expr) == NON_LVALUE_EXPR)
10986 	     && TREE_CODE (expr) == TREE_CODE (addr)
10987 	     && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
10988 	{
10989 	  expr = TREE_OPERAND (expr, 0);
10990 	  addr = TREE_OPERAND (addr, 0);
10991 	}
10992       if (expr == addr)
10993 	return true;
10994       return (TREE_CODE (addr) == ADDR_EXPR
10995 	      && TREE_CODE (expr) == ADDR_EXPR
10996 	      && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
10997     }
10998   if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
10999     return true;
11000   return false;
11001 }
11002 
11003 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR.  If an
11004    expression does not involve the lhs, evaluate it into a temporary.
11005    Return 1 if the lhs appeared as a subexpression, 0 if it did not,
11006    or -1 if an error was encountered.  */
11007 
static int
goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
		    tree lhs_var)
{
  tree expr = *expr_p;
  int saw_lhs;

  /* The whole expression is the stabilized lhs itself: replace it with
     the temporary holding the atomically loaded value.  */
  if (goa_lhs_expr_p (expr, lhs_addr))
    {
      *expr_p = lhs_var;
      return 1;
    }
  /* A gimple value cannot contain the lhs and needs no work.  */
  if (is_gimple_val (expr))
    return 0;

  /* Recurse into the operands, accumulating whether any of them
     mentioned the lhs.  Note the deliberate fallthroughs: binary and
     comparison codes visit operand 1 and then share the unary code's
     handling of operand 0.  */
  saw_lhs = 0;
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_binary:
    case tcc_comparison:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
				     lhs_var);
      /* FALLTHRU */
    case tcc_unary:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
				     lhs_var);
      break;
    case tcc_expression:
      switch (TREE_CODE (expr))
	{
	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	case BIT_INSERT_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
					 lhs_addr, lhs_var);
	  /* FALLTHRU */
	case TRUTH_NOT_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
					 lhs_addr, lhs_var);
	  break;
	case COMPOUND_EXPR:
	  /* Break out any preevaluations from cp_build_modify_expr.
	     Each lhs of the chain is gimplified as a statement; the
	     final rhs is then stabilized recursively.  */
	  for (; TREE_CODE (expr) == COMPOUND_EXPR;
	       expr = TREE_OPERAND (expr, 1))
	    gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
	  *expr_p = expr;
	  return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
	default:
	  break;
	}
      break;
    case tcc_reference:
      /* Only BIT_FIELD_REF can wrap the lhs here; other references are
	 left for the generic gimplification below.  */
      if (TREE_CODE (expr) == BIT_FIELD_REF)
	saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
				       lhs_addr, lhs_var);
      break;
    default:
      break;
    }

  /* If the lhs appears nowhere in EXPR, evaluate the whole thing into
     a temporary now, before the atomic load is emitted.  */
  if (saw_lhs == 0)
    {
      enum gimplify_status gs;
      gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
      if (gs != GS_ALL_DONE)
	saw_lhs = -1;
    }

  return saw_lhs;
}
11081 
11082 /* Gimplify an OMP_ATOMIC statement.  */
11083 
static enum gimplify_status
gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
{
  /* The operand layout is: operand 0 is the address of the location,
     operand 1 (absent for OMP_ATOMIC_READ) the value expression.  */
  tree addr = TREE_OPERAND (*expr_p, 0);
  tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
	     ? NULL : TREE_OPERAND (*expr_p, 1);
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
  tree tmp_load;
  gomp_atomic_load *loadstmt;
  gomp_atomic_store *storestmt;

  /* TMP_LOAD will receive the old value; occurrences of the lhs inside
     RHS are rewritten to it, and everything not involving the lhs is
     pre-evaluated so it happens outside the atomic region.  */
  tmp_load = create_tmp_reg (type);
  if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
    return GS_ERROR;

  if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
      != GS_ALL_DONE)
    return GS_ERROR;

  /* Emit the load first; RHS below may reference TMP_LOAD.  */
  loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
  gimplify_seq_add_stmt (pre_p, loadstmt);
  if (rhs)
    {
      /* BIT_INSERT_EXPR is not valid for non-integral bitfield
	 representatives.  Use BIT_FIELD_REF on the lhs instead.  */
      if (TREE_CODE (rhs) == BIT_INSERT_EXPR
	  && !INTEGRAL_TYPE_P (TREE_TYPE (tmp_load)))
	{
	  tree bitpos = TREE_OPERAND (rhs, 2);
	  tree op1 = TREE_OPERAND (rhs, 1);
	  tree bitsize;
	  tree tmp_store = tmp_load;
	  /* For capture-old, keep TMP_LOAD intact as the captured value
	     and mutate a copy instead.  */
	  if (TREE_CODE (*expr_p) == OMP_ATOMIC_CAPTURE_OLD)
	    tmp_store = get_initialized_tmp_var (tmp_load, pre_p, NULL);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (op1)))
	    bitsize = bitsize_int (TYPE_PRECISION (TREE_TYPE (op1)));
	  else
	    bitsize = TYPE_SIZE (TREE_TYPE (op1));
	  gcc_assert (TREE_OPERAND (rhs, 0) == tmp_load);
	  /* BIT_FIELD_REF (tmp_store, bitsize, bitpos) = op1;  */
	  tree t = build2_loc (EXPR_LOCATION (rhs),
			       MODIFY_EXPR, void_type_node,
			       build3_loc (EXPR_LOCATION (rhs), BIT_FIELD_REF,
					   TREE_TYPE (op1), tmp_store, bitsize,
					   bitpos), op1);
	  gimplify_and_add (t, pre_p);
	  rhs = tmp_store;
	}
      if (gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
	  != GS_ALL_DONE)
	return GS_ERROR;
    }

  /* An atomic read stores back the value it just loaded.  */
  if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
    rhs = tmp_load;
  storestmt = gimple_build_omp_atomic_store (rhs);
  gimplify_seq_add_stmt (pre_p, storestmt);
  if (OMP_ATOMIC_SEQ_CST (*expr_p))
    {
      gimple_omp_atomic_set_seq_cst (loadstmt);
      gimple_omp_atomic_set_seq_cst (storestmt);
    }
  /* For the capture forms, the construct itself has a value: the old
     value (the load result) or the new one (the stored rhs).  */
  switch (TREE_CODE (*expr_p))
    {
    case OMP_ATOMIC_READ:
    case OMP_ATOMIC_CAPTURE_OLD:
      *expr_p = tmp_load;
      gimple_omp_atomic_set_need_value (loadstmt);
      break;
    case OMP_ATOMIC_CAPTURE_NEW:
      *expr_p = rhs;
      gimple_omp_atomic_set_need_value (storestmt);
      break;
    default:
      *expr_p = NULL;
      break;
    }

  return GS_ALL_DONE;
}
11163 
11164 /* Gimplify a TRANSACTION_EXPR.  This involves gimplification of the
11165    body, and adding some EH bits.  */
11166 
11167 static enum gimplify_status
11168 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
11169 {
11170   tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
11171   gimple *body_stmt;
11172   gtransaction *trans_stmt;
11173   gimple_seq body = NULL;
11174   int subcode = 0;
11175 
11176   /* Wrap the transaction body in a BIND_EXPR so we have a context
11177      where to put decls for OMP.  */
11178   if (TREE_CODE (tbody) != BIND_EXPR)
11179     {
11180       tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
11181       TREE_SIDE_EFFECTS (bind) = 1;
11182       SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
11183       TRANSACTION_EXPR_BODY (expr) = bind;
11184     }
11185 
11186   push_gimplify_context ();
11187   temp = voidify_wrapper_expr (*expr_p, NULL);
11188 
11189   body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
11190   pop_gimplify_context (body_stmt);
11191 
11192   trans_stmt = gimple_build_transaction (body);
11193   if (TRANSACTION_EXPR_OUTER (expr))
11194     subcode = GTMA_IS_OUTER;
11195   else if (TRANSACTION_EXPR_RELAXED (expr))
11196     subcode = GTMA_IS_RELAXED;
11197   gimple_transaction_set_subcode (trans_stmt, subcode);
11198 
11199   gimplify_seq_add_stmt (pre_p, trans_stmt);
11200 
11201   if (temp)
11202     {
11203       *expr_p = temp;
11204       return GS_OK;
11205     }
11206 
11207   *expr_p = NULL_TREE;
11208   return GS_ALL_DONE;
11209 }
11210 
11211 /* Gimplify an OMP_ORDERED construct.  EXPR is the tree version.  BODY
11212    is the OMP_BODY of the original EXPR (which has already been
11213    gimplified so it's not present in the EXPR).
11214 
11215    Return the gimplified GIMPLE_OMP_ORDERED tuple.  */
11216 
static gimple *
gimplify_omp_ordered (tree expr, gimple_seq body)
{
  tree c, decls;
  int failures = 0;
  unsigned int i;
  tree source_c = NULL_TREE;
  tree sink_c = NULL_TREE;

  /* Validate depend(sink:)/depend(source) clauses against the
     iteration variables recorded on the enclosing ordered loop.
     loop_iter_var stores pairs: [2*i] the original decl of the I-th
     iterator, [2*i+1] the decl to substitute for it.  */
  if (gimplify_omp_ctxp)
    {
      for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
	/* depend (sink/source) requires an enclosing ordered(n) loop;
	   an empty loop_iter_var means there is none.  */
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	    && gimplify_omp_ctxp->loop_iter_var.is_empty ()
	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE))
	  {
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%<ordered%> construct with %<depend%> clause must be "
		      "closely nested inside a loop with %<ordered%> clause "
		      "with a parameter");
	    failures++;
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
		 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
	  {
	    bool fail = false;
	    /* Each element of the sink vector must name the matching
	       loop iterator, in order; replace it by the decl recorded
	       for use inside the body.  */
	    for (decls = OMP_CLAUSE_DECL (c), i = 0;
		 decls && TREE_CODE (decls) == TREE_LIST;
		 decls = TREE_CHAIN (decls), ++i)
	      if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
		continue;
	      else if (TREE_VALUE (decls)
		       != gimplify_omp_ctxp->loop_iter_var[2 * i])
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "variable %qE is not an iteration "
			    "of outermost loop %d, expected %qE",
			    TREE_VALUE (decls), i + 1,
			    gimplify_omp_ctxp->loop_iter_var[2 * i]);
		  fail = true;
		  failures++;
		}
	      else
		TREE_VALUE (decls)
		  = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
	    /* The sink vector length must match the loop depth.  */
	    if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
	      {
		error_at (OMP_CLAUSE_LOCATION (c),
			  "number of variables in %<depend(sink)%> "
			  "clause does not match number of "
			  "iteration variables");
		failures++;
	      }
	    sink_c = c;
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
		 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
	  {
	    /* At most one depend(source) is allowed per construct.  */
	    if (source_c)
	      {
		error_at (OMP_CLAUSE_LOCATION (c),
			  "more than one %<depend(source)%> clause on an "
			  "%<ordered%> construct");
		failures++;
	      }
	    else
	      source_c = c;
	  }
    }
  /* sink and source are mutually exclusive on one construct.  */
  if (source_c && sink_c)
    {
      error_at (OMP_CLAUSE_LOCATION (source_c),
		"%<depend(source)%> clause specified together with "
		"%<depend(sink:)%> clauses on the same construct");
      failures++;
    }

  /* On any diagnostic, drop the construct entirely.  */
  if (failures)
    return gimple_build_nop ();
  return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
}
11299 
11300 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE.  If the
11301    expression produces a value to be used as an operand inside a GIMPLE
11302    statement, the value will be stored back in *EXPR_P.  This value will
11303    be a tree of class tcc_declaration, tcc_constant, tcc_reference or
11304    an SSA_NAME.  The corresponding sequence of GIMPLE statements is
11305    emitted in PRE_P and POST_P.
11306 
11307    Additionally, this process may overwrite parts of the input
11308    expression during gimplification.  Ideally, it should be
11309    possible to do non-destructive gimplification.
11310 
11311    EXPR_P points to the GENERIC expression to convert to GIMPLE.  If
11312       the expression needs to evaluate to a value to be used as
11313       an operand in a GIMPLE statement, this value will be stored in
11314       *EXPR_P on exit.  This happens when the caller specifies one
11315       of fb_lvalue or fb_rvalue fallback flags.
11316 
11317    PRE_P will contain the sequence of GIMPLE statements corresponding
11318        to the evaluation of EXPR and all the side-effects that must
11319        be executed before the main expression.  On exit, the last
11320        statement of PRE_P is the core statement being gimplified.  For
11321        instance, when gimplifying 'if (++a)' the last statement in
11322        PRE_P will be 'if (t.1)' where t.1 is the result of
11323        pre-incrementing 'a'.
11324 
11325    POST_P will contain the sequence of GIMPLE statements corresponding
11326        to the evaluation of all the side-effects that must be executed
11327        after the main expression.  If this is NULL, the post
11328        side-effects are stored at the end of PRE_P.
11329 
11330        The reason why the output is split in two is to handle post
11331        side-effects explicitly.  In some cases, an expression may have
11332        inner and outer post side-effects which need to be emitted in
11333        an order different from the one given by the recursive
11334        traversal.  For instance, for the expression (*p--)++ the post
11335        side-effects of '--' must actually occur *after* the post
11336        side-effects of '++'.  However, gimplification will first visit
11337        the inner expression, so if a separate POST sequence was not
11338        used, the resulting sequence would be:
11339 
11340        	    1	t.1 = *p
11341        	    2	p = p - 1
11342        	    3	t.2 = t.1 + 1
11343        	    4	*p = t.2
11344 
11345        However, the post-decrement operation in line #2 must not be
11346        evaluated until after the store to *p at line #4, so the
11347        correct sequence should be:
11348 
11349        	    1	t.1 = *p
11350        	    2	t.2 = t.1 + 1
11351        	    3	*p = t.2
11352        	    4	p = p - 1
11353 
11354        So, by specifying a separate post queue, it is possible
11355        to emit the post side-effects in the correct order.
11356        If POST_P is NULL, an internal queue will be used.  Before
11357        returning to the caller, the sequence POST_P is appended to
11358        the main output sequence PRE_P.
11359 
11360    GIMPLE_TEST_F points to a function that takes a tree T and
11361        returns nonzero if T is in the GIMPLE form requested by the
11362        caller.  The GIMPLE predicates are in gimple.c.
11363 
11364    FALLBACK tells the function what sort of a temporary we want if
11365        gimplification cannot produce an expression that complies with
11366        GIMPLE_TEST_F.
11367 
11368        fb_none means that no temporary should be generated
11369        fb_rvalue means that an rvalue is OK to generate
11370        fb_lvalue means that an lvalue is OK to generate
11371        fb_either means that either is OK, but an lvalue is preferable.
11372        fb_mayfail means that gimplification may fail (in which case
11373        GS_ERROR will be returned)
11374 
11375    The return value is either GS_ERROR or GS_ALL_DONE, since this
11376    function iterates until EXPR is completely gimplified or an error
11377    occurs.  */
11378 
11379 enum gimplify_status
11380 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
11381 	       bool (*gimple_test_f) (tree), fallback_t fallback)
11382 {
11383   tree tmp;
11384   gimple_seq internal_pre = NULL;
11385   gimple_seq internal_post = NULL;
11386   tree save_expr;
11387   bool is_statement;
11388   location_t saved_location;
11389   enum gimplify_status ret;
11390   gimple_stmt_iterator pre_last_gsi, post_last_gsi;
11391   tree label;
11392 
11393   save_expr = *expr_p;
11394   if (save_expr == NULL_TREE)
11395     return GS_ALL_DONE;
11396 
11397   /* If we are gimplifying a top-level statement, PRE_P must be valid.  */
11398   is_statement = gimple_test_f == is_gimple_stmt;
11399   if (is_statement)
11400     gcc_assert (pre_p);
11401 
11402   /* Consistency checks.  */
11403   if (gimple_test_f == is_gimple_reg)
11404     gcc_assert (fallback & (fb_rvalue | fb_lvalue));
11405   else if (gimple_test_f == is_gimple_val
11406            || gimple_test_f == is_gimple_call_addr
11407            || gimple_test_f == is_gimple_condexpr
11408            || gimple_test_f == is_gimple_mem_rhs
11409            || gimple_test_f == is_gimple_mem_rhs_or_call
11410            || gimple_test_f == is_gimple_reg_rhs
11411            || gimple_test_f == is_gimple_reg_rhs_or_call
11412            || gimple_test_f == is_gimple_asm_val
11413 	   || gimple_test_f == is_gimple_mem_ref_addr)
11414     gcc_assert (fallback & fb_rvalue);
11415   else if (gimple_test_f == is_gimple_min_lval
11416 	   || gimple_test_f == is_gimple_lvalue)
11417     gcc_assert (fallback & fb_lvalue);
11418   else if (gimple_test_f == is_gimple_addressable)
11419     gcc_assert (fallback & fb_either);
11420   else if (gimple_test_f == is_gimple_stmt)
11421     gcc_assert (fallback == fb_none);
11422   else
11423     {
11424       /* We should have recognized the GIMPLE_TEST_F predicate to
11425 	 know what kind of fallback to use in case a temporary is
11426 	 needed to hold the value or address of *EXPR_P.  */
11427       gcc_unreachable ();
11428     }
11429 
11430   /* We used to check the predicate here and return immediately if it
11431      succeeds.  This is wrong; the design is for gimplification to be
11432      idempotent, and for the predicates to only test for valid forms, not
11433      whether they are fully simplified.  */
11434   if (pre_p == NULL)
11435     pre_p = &internal_pre;
11436 
11437   if (post_p == NULL)
11438     post_p = &internal_post;
11439 
11440   /* Remember the last statements added to PRE_P and POST_P.  Every
11441      new statement added by the gimplification helpers needs to be
11442      annotated with location information.  To centralize the
11443      responsibility, we remember the last statement that had been
11444      added to both queues before gimplifying *EXPR_P.  If
11445      gimplification produces new statements in PRE_P and POST_P, those
11446      statements will be annotated with the same location information
11447      as *EXPR_P.  */
11448   pre_last_gsi = gsi_last (*pre_p);
11449   post_last_gsi = gsi_last (*post_p);
11450 
11451   saved_location = input_location;
11452   if (save_expr != error_mark_node
11453       && EXPR_HAS_LOCATION (*expr_p))
11454     input_location = EXPR_LOCATION (*expr_p);
11455 
11456   /* Loop over the specific gimplifiers until the toplevel node
11457      remains the same.  */
11458   do
11459     {
11460       /* Strip away as many useless type conversions as possible
11461 	 at the toplevel.  */
11462       STRIP_USELESS_TYPE_CONVERSION (*expr_p);
11463 
11464       /* Remember the expr.  */
11465       save_expr = *expr_p;
11466 
11467       /* Die, die, die, my darling.  */
11468       if (error_operand_p (save_expr))
11469 	{
11470 	  ret = GS_ERROR;
11471 	  break;
11472 	}
11473 
11474       /* Do any language-specific gimplification.  */
11475       ret = ((enum gimplify_status)
11476 	     lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
11477       if (ret == GS_OK)
11478 	{
11479 	  if (*expr_p == NULL_TREE)
11480 	    break;
11481 	  if (*expr_p != save_expr)
11482 	    continue;
11483 	}
11484       else if (ret != GS_UNHANDLED)
11485 	break;
11486 
11487       /* Make sure that all the cases set 'ret' appropriately.  */
11488       ret = GS_UNHANDLED;
11489       switch (TREE_CODE (*expr_p))
11490 	{
11491 	  /* First deal with the special cases.  */
11492 
11493 	case POSTINCREMENT_EXPR:
11494 	case POSTDECREMENT_EXPR:
11495 	case PREINCREMENT_EXPR:
11496 	case PREDECREMENT_EXPR:
11497 	  ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
11498 					fallback != fb_none,
11499 					TREE_TYPE (*expr_p));
11500 	  break;
11501 
11502 	case VIEW_CONVERT_EXPR:
11503 	  if (is_gimple_reg_type (TREE_TYPE (*expr_p))
11504 	      && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
11505 	    {
11506 	      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11507 				   post_p, is_gimple_val, fb_rvalue);
11508 	      recalculate_side_effects (*expr_p);
11509 	      break;
11510 	    }
11511 	  /* Fallthru.  */
11512 
11513 	case ARRAY_REF:
11514 	case ARRAY_RANGE_REF:
11515 	case REALPART_EXPR:
11516 	case IMAGPART_EXPR:
11517 	case COMPONENT_REF:
11518 	  ret = gimplify_compound_lval (expr_p, pre_p, post_p,
11519 					fallback ? fallback : fb_rvalue);
11520 	  break;
11521 
11522 	case COND_EXPR:
11523 	  ret = gimplify_cond_expr (expr_p, pre_p, fallback);
11524 
11525 	  /* C99 code may assign to an array in a structure value of a
11526 	     conditional expression, and this has undefined behavior
11527 	     only on execution, so create a temporary if an lvalue is
11528 	     required.  */
11529 	  if (fallback == fb_lvalue)
11530 	    {
11531 	      *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
11532 	      mark_addressable (*expr_p);
11533 	      ret = GS_OK;
11534 	    }
11535 	  break;
11536 
11537 	case CALL_EXPR:
11538 	  ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
11539 
11540 	  /* C99 code may assign to an array in a structure returned
11541 	     from a function, and this has undefined behavior only on
11542 	     execution, so create a temporary if an lvalue is
11543 	     required.  */
11544 	  if (fallback == fb_lvalue)
11545 	    {
11546 	      *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
11547 	      mark_addressable (*expr_p);
11548 	      ret = GS_OK;
11549 	    }
11550 	  break;
11551 
11552 	case TREE_LIST:
11553 	  gcc_unreachable ();
11554 
11555 	case COMPOUND_EXPR:
11556 	  ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
11557 	  break;
11558 
11559 	case COMPOUND_LITERAL_EXPR:
11560 	  ret = gimplify_compound_literal_expr (expr_p, pre_p,
11561 						gimple_test_f, fallback);
11562 	  break;
11563 
11564 	case MODIFY_EXPR:
11565 	case INIT_EXPR:
11566 	  ret = gimplify_modify_expr (expr_p, pre_p, post_p,
11567 				      fallback != fb_none);
11568 	  break;
11569 
11570 	case TRUTH_ANDIF_EXPR:
11571 	case TRUTH_ORIF_EXPR:
11572 	  {
11573 	    /* Preserve the original type of the expression and the
11574 	       source location of the outer expression.  */
11575 	    tree org_type = TREE_TYPE (*expr_p);
11576 	    *expr_p = gimple_boolify (*expr_p);
11577 	    *expr_p = build3_loc (input_location, COND_EXPR,
11578 				  org_type, *expr_p,
11579 				  fold_convert_loc
11580 				    (input_location,
11581 				     org_type, boolean_true_node),
11582 				  fold_convert_loc
11583 				    (input_location,
11584 				     org_type, boolean_false_node));
11585 	    ret = GS_OK;
11586 	    break;
11587 	  }
11588 
11589 	case TRUTH_NOT_EXPR:
11590 	  {
11591 	    tree type = TREE_TYPE (*expr_p);
11592 	    /* The parsers are careful to generate TRUTH_NOT_EXPR
11593 	       only with operands that are always zero or one.
11594 	       We do not fold here but handle the only interesting case
11595 	       manually, as fold may re-introduce the TRUTH_NOT_EXPR.  */
11596 	    *expr_p = gimple_boolify (*expr_p);
11597 	    if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
11598 	      *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
11599 				    TREE_TYPE (*expr_p),
11600 				    TREE_OPERAND (*expr_p, 0));
11601 	    else
11602 	      *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
11603 				    TREE_TYPE (*expr_p),
11604 				    TREE_OPERAND (*expr_p, 0),
11605 				    build_int_cst (TREE_TYPE (*expr_p), 1));
11606 	    if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
11607 	      *expr_p = fold_convert_loc (input_location, type, *expr_p);
11608 	    ret = GS_OK;
11609 	    break;
11610 	  }
11611 
11612 	case ADDR_EXPR:
11613 	  ret = gimplify_addr_expr (expr_p, pre_p, post_p);
11614 	  break;
11615 
11616 	case ANNOTATE_EXPR:
11617 	  {
11618 	    tree cond = TREE_OPERAND (*expr_p, 0);
11619 	    tree kind = TREE_OPERAND (*expr_p, 1);
11620 	    tree data = TREE_OPERAND (*expr_p, 2);
11621 	    tree type = TREE_TYPE (cond);
11622 	    if (!INTEGRAL_TYPE_P (type))
11623 	      {
11624 		*expr_p = cond;
11625 		ret = GS_OK;
11626 		break;
11627 	      }
11628 	    tree tmp = create_tmp_var (type);
11629 	    gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
11630 	    gcall *call
11631 	      = gimple_build_call_internal (IFN_ANNOTATE, 3, cond, kind, data);
11632 	    gimple_call_set_lhs (call, tmp);
11633 	    gimplify_seq_add_stmt (pre_p, call);
11634 	    *expr_p = tmp;
11635 	    ret = GS_ALL_DONE;
11636 	    break;
11637 	  }
11638 
11639 	case VA_ARG_EXPR:
11640 	  ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
11641 	  break;
11642 
11643 	CASE_CONVERT:
11644 	  if (IS_EMPTY_STMT (*expr_p))
11645 	    {
11646 	      ret = GS_ALL_DONE;
11647 	      break;
11648 	    }
11649 
11650 	  if (VOID_TYPE_P (TREE_TYPE (*expr_p))
11651 	      || fallback == fb_none)
11652 	    {
11653 	      /* Just strip a conversion to void (or in void context) and
11654 		 try again.  */
11655 	      *expr_p = TREE_OPERAND (*expr_p, 0);
11656 	      ret = GS_OK;
11657 	      break;
11658 	    }
11659 
11660 	  ret = gimplify_conversion (expr_p);
11661 	  if (ret == GS_ERROR)
11662 	    break;
11663 	  if (*expr_p != save_expr)
11664 	    break;
11665 	  /* FALLTHRU */
11666 
11667 	case FIX_TRUNC_EXPR:
11668 	  /* unary_expr: ... | '(' cast ')' val | ...  */
11669 	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11670 			       is_gimple_val, fb_rvalue);
11671 	  recalculate_side_effects (*expr_p);
11672 	  break;
11673 
11674 	case INDIRECT_REF:
11675 	  {
11676 	    bool volatilep = TREE_THIS_VOLATILE (*expr_p);
11677 	    bool notrap = TREE_THIS_NOTRAP (*expr_p);
11678 	    tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
11679 
11680 	    *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
11681 	    if (*expr_p != save_expr)
11682 	      {
11683 		ret = GS_OK;
11684 		break;
11685 	      }
11686 
11687 	    ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11688 				 is_gimple_reg, fb_rvalue);
11689 	    if (ret == GS_ERROR)
11690 	      break;
11691 
11692 	    recalculate_side_effects (*expr_p);
11693 	    *expr_p = fold_build2_loc (input_location, MEM_REF,
11694 				       TREE_TYPE (*expr_p),
11695 				       TREE_OPERAND (*expr_p, 0),
11696 				       build_int_cst (saved_ptr_type, 0));
11697 	    TREE_THIS_VOLATILE (*expr_p) = volatilep;
11698 	    TREE_THIS_NOTRAP (*expr_p) = notrap;
11699 	    ret = GS_OK;
11700 	    break;
11701 	  }
11702 
11703 	/* We arrive here through the various re-gimplification paths.  */
11704 	case MEM_REF:
11705 	  /* First try re-folding the whole thing.  */
11706 	  tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
11707 			     TREE_OPERAND (*expr_p, 0),
11708 			     TREE_OPERAND (*expr_p, 1));
11709 	  if (tmp)
11710 	    {
11711 	      REF_REVERSE_STORAGE_ORDER (tmp)
11712 	        = REF_REVERSE_STORAGE_ORDER (*expr_p);
11713 	      *expr_p = tmp;
11714 	      recalculate_side_effects (*expr_p);
11715 	      ret = GS_OK;
11716 	      break;
11717 	    }
11718 	  /* Avoid re-gimplifying the address operand if it is already
11719 	     in suitable form.  Re-gimplifying would mark the address
11720 	     operand addressable.  Always gimplify when not in SSA form
11721 	     as we still may have to gimplify decls with value-exprs.  */
11722 	  if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
11723 	      || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
11724 	    {
11725 	      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11726 				   is_gimple_mem_ref_addr, fb_rvalue);
11727 	      if (ret == GS_ERROR)
11728 		break;
11729 	    }
11730 	  recalculate_side_effects (*expr_p);
11731 	  ret = GS_ALL_DONE;
11732 	  break;
11733 
11734 	/* Constants need not be gimplified.  */
11735 	case INTEGER_CST:
11736 	case REAL_CST:
11737 	case FIXED_CST:
11738 	case STRING_CST:
11739 	case COMPLEX_CST:
11740 	case VECTOR_CST:
11741 	  /* Drop the overflow flag on constants, we do not want
11742 	     that in the GIMPLE IL.  */
11743 	  if (TREE_OVERFLOW_P (*expr_p))
11744 	    *expr_p = drop_tree_overflow (*expr_p);
11745 	  ret = GS_ALL_DONE;
11746 	  break;
11747 
11748 	case CONST_DECL:
11749 	  /* If we require an lvalue, such as for ADDR_EXPR, retain the
11750 	     CONST_DECL node.  Otherwise the decl is replaceable by its
11751 	     value.  */
11752 	  /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either.  */
11753 	  if (fallback & fb_lvalue)
11754 	    ret = GS_ALL_DONE;
11755 	  else
11756 	    {
11757 	      *expr_p = DECL_INITIAL (*expr_p);
11758 	      ret = GS_OK;
11759 	    }
11760 	  break;
11761 
11762 	case DECL_EXPR:
11763 	  ret = gimplify_decl_expr (expr_p, pre_p);
11764 	  break;
11765 
11766 	case BIND_EXPR:
11767 	  ret = gimplify_bind_expr (expr_p, pre_p);
11768 	  break;
11769 
11770 	case LOOP_EXPR:
11771 	  ret = gimplify_loop_expr (expr_p, pre_p);
11772 	  break;
11773 
11774 	case SWITCH_EXPR:
11775 	  ret = gimplify_switch_expr (expr_p, pre_p);
11776 	  break;
11777 
11778 	case EXIT_EXPR:
11779 	  ret = gimplify_exit_expr (expr_p);
11780 	  break;
11781 
11782 	case GOTO_EXPR:
11783 	  /* If the target is not LABEL, then it is a computed jump
11784 	     and the target needs to be gimplified.  */
11785 	  if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
11786 	    {
11787 	      ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
11788 				   NULL, is_gimple_val, fb_rvalue);
11789 	      if (ret == GS_ERROR)
11790 		break;
11791 	    }
11792 	  gimplify_seq_add_stmt (pre_p,
11793 			  gimple_build_goto (GOTO_DESTINATION (*expr_p)));
11794 	  ret = GS_ALL_DONE;
11795 	  break;
11796 
11797 	case PREDICT_EXPR:
11798 	  gimplify_seq_add_stmt (pre_p,
11799 			gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
11800 					      PREDICT_EXPR_OUTCOME (*expr_p)));
11801 	  ret = GS_ALL_DONE;
11802 	  break;
11803 
11804 	case LABEL_EXPR:
11805 	  ret = gimplify_label_expr (expr_p, pre_p);
11806 	  label = LABEL_EXPR_LABEL (*expr_p);
11807 	  gcc_assert (decl_function_context (label) == current_function_decl);
11808 
11809 	  /* If the label is used in a goto statement, or address of the label
11810 	     is taken, we need to unpoison all variables that were seen so far.
11811 	     Doing so would prevent us from reporting false positives.  */
11812 	  if (asan_poisoned_variables
11813 	      && asan_used_labels != NULL
11814 	      && asan_used_labels->contains (label))
11815 	    asan_poison_variables (asan_poisoned_variables, false, pre_p);
11816 	  break;
11817 
11818 	case CASE_LABEL_EXPR:
11819 	  ret = gimplify_case_label_expr (expr_p, pre_p);
11820 
11821 	  if (gimplify_ctxp->live_switch_vars)
11822 	    asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
11823 				   pre_p);
11824 	  break;
11825 
11826 	case RETURN_EXPR:
11827 	  ret = gimplify_return_expr (*expr_p, pre_p);
11828 	  break;
11829 
11830 	case CONSTRUCTOR:
11831 	  /* Don't reduce this in place; let gimplify_init_constructor work its
11832 	     magic.  But if we're just elaborating this for side effects, just
11833 	     gimplify any element that has side-effects.  */
11834 	  if (fallback == fb_none)
11835 	    {
11836 	      unsigned HOST_WIDE_INT ix;
11837 	      tree val;
11838 	      tree temp = NULL_TREE;
11839 	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
11840 		if (TREE_SIDE_EFFECTS (val))
11841 		  append_to_statement_list (val, &temp);
11842 
11843 	      *expr_p = temp;
11844 	      ret = temp ? GS_OK : GS_ALL_DONE;
11845 	    }
11846 	  /* C99 code may assign to an array in a constructed
11847 	     structure or union, and this has undefined behavior only
11848 	     on execution, so create a temporary if an lvalue is
11849 	     required.  */
11850 	  else if (fallback == fb_lvalue)
11851 	    {
11852 	      *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
11853 	      mark_addressable (*expr_p);
11854 	      ret = GS_OK;
11855 	    }
11856 	  else
11857 	    ret = GS_ALL_DONE;
11858 	  break;
11859 
11860 	  /* The following are special cases that are not handled by the
11861 	     original GIMPLE grammar.  */
11862 
11863 	  /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
11864 	     eliminated.  */
11865 	case SAVE_EXPR:
11866 	  ret = gimplify_save_expr (expr_p, pre_p, post_p);
11867 	  break;
11868 
11869 	case BIT_FIELD_REF:
11870 	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11871 			       post_p, is_gimple_lvalue, fb_either);
11872 	  recalculate_side_effects (*expr_p);
11873 	  break;
11874 
11875 	case TARGET_MEM_REF:
11876 	  {
11877 	    enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
11878 
11879 	    if (TMR_BASE (*expr_p))
11880 	      r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
11881 				  post_p, is_gimple_mem_ref_addr, fb_either);
11882 	    if (TMR_INDEX (*expr_p))
11883 	      r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
11884 				  post_p, is_gimple_val, fb_rvalue);
11885 	    if (TMR_INDEX2 (*expr_p))
11886 	      r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
11887 				  post_p, is_gimple_val, fb_rvalue);
11888 	    /* TMR_STEP and TMR_OFFSET are always integer constants.  */
11889 	    ret = MIN (r0, r1);
11890 	  }
11891 	  break;
11892 
11893 	case NON_LVALUE_EXPR:
11894 	  /* This should have been stripped above.  */
11895 	  gcc_unreachable ();
11896 
11897 	case ASM_EXPR:
11898 	  ret = gimplify_asm_expr (expr_p, pre_p, post_p);
11899 	  break;
11900 
11901 	case TRY_FINALLY_EXPR:
11902 	case TRY_CATCH_EXPR:
11903 	  {
11904 	    gimple_seq eval, cleanup;
11905 	    gtry *try_;
11906 
11907 	    /* Calls to destructors are generated automatically in FINALLY/CATCH
11908 	       block. They should have location as UNKNOWN_LOCATION. However,
11909 	       gimplify_call_expr will reset these call stmts to input_location
11910 	       if it finds stmt's location is unknown. To prevent resetting for
11911 	       destructors, we set the input_location to unknown.
11912 	       Note that this only affects the destructor calls in FINALLY/CATCH
11913 	       block, and will automatically reset to its original value by the
11914 	       end of gimplify_expr.  */
11915 	    input_location = UNKNOWN_LOCATION;
11916 	    eval = cleanup = NULL;
11917 	    gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
11918 	    gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
11919 	    /* Don't create bogus GIMPLE_TRY with empty cleanup.  */
11920 	    if (gimple_seq_empty_p (cleanup))
11921 	      {
11922 		gimple_seq_add_seq (pre_p, eval);
11923 		ret = GS_ALL_DONE;
11924 		break;
11925 	      }
11926 	    try_ = gimple_build_try (eval, cleanup,
11927 				     TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
11928 				     ? GIMPLE_TRY_FINALLY
11929 				     : GIMPLE_TRY_CATCH);
11930 	    if (EXPR_HAS_LOCATION (save_expr))
11931 	      gimple_set_location (try_, EXPR_LOCATION (save_expr));
11932 	    else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
11933 	      gimple_set_location (try_, saved_location);
11934 	    if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
11935 	      gimple_try_set_catch_is_cleanup (try_,
11936 					       TRY_CATCH_IS_CLEANUP (*expr_p));
11937 	    gimplify_seq_add_stmt (pre_p, try_);
11938 	    ret = GS_ALL_DONE;
11939 	    break;
11940 	  }
11941 
11942 	case CLEANUP_POINT_EXPR:
11943 	  ret = gimplify_cleanup_point_expr (expr_p, pre_p);
11944 	  break;
11945 
11946 	case TARGET_EXPR:
11947 	  ret = gimplify_target_expr (expr_p, pre_p, post_p);
11948 	  break;
11949 
11950 	case CATCH_EXPR:
11951 	  {
11952 	    gimple *c;
11953 	    gimple_seq handler = NULL;
11954 	    gimplify_and_add (CATCH_BODY (*expr_p), &handler);
11955 	    c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
11956 	    gimplify_seq_add_stmt (pre_p, c);
11957 	    ret = GS_ALL_DONE;
11958 	    break;
11959 	  }
11960 
11961 	case EH_FILTER_EXPR:
11962 	  {
11963 	    gimple *ehf;
11964 	    gimple_seq failure = NULL;
11965 
11966 	    gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
11967 	    ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
11968 	    gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
11969 	    gimplify_seq_add_stmt (pre_p, ehf);
11970 	    ret = GS_ALL_DONE;
11971 	    break;
11972 	  }
11973 
11974 	case OBJ_TYPE_REF:
11975 	  {
11976 	    enum gimplify_status r0, r1;
11977 	    r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
11978 				post_p, is_gimple_val, fb_rvalue);
11979 	    r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
11980 				post_p, is_gimple_val, fb_rvalue);
11981 	    TREE_SIDE_EFFECTS (*expr_p) = 0;
11982 	    ret = MIN (r0, r1);
11983 	  }
11984 	  break;
11985 
11986 	case LABEL_DECL:
11987 	  /* We get here when taking the address of a label.  We mark
11988 	     the label as "forced"; meaning it can never be removed and
11989 	     it is a potential target for any computed goto.  */
11990 	  FORCED_LABEL (*expr_p) = 1;
11991 	  ret = GS_ALL_DONE;
11992 	  break;
11993 
11994 	case STATEMENT_LIST:
11995 	  ret = gimplify_statement_list (expr_p, pre_p);
11996 	  break;
11997 
11998 	case WITH_SIZE_EXPR:
11999 	  {
12000 	    gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12001 			   post_p == &internal_post ? NULL : post_p,
12002 			   gimple_test_f, fallback);
12003 	    gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
12004 			   is_gimple_val, fb_rvalue);
12005 	    ret = GS_ALL_DONE;
12006 	  }
12007 	  break;
12008 
12009 	case VAR_DECL:
12010 	case PARM_DECL:
12011 	  ret = gimplify_var_or_parm_decl (expr_p);
12012 	  break;
12013 
12014 	case RESULT_DECL:
12015 	  /* When within an OMP context, notice uses of variables.  */
12016 	  if (gimplify_omp_ctxp)
12017 	    omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
12018 	  ret = GS_ALL_DONE;
12019 	  break;
12020 
12021 	case DEBUG_EXPR_DECL:
12022 	  gcc_unreachable ();
12023 
12024 	case DEBUG_BEGIN_STMT:
12025 	  gimplify_seq_add_stmt (pre_p,
12026 				 gimple_build_debug_begin_stmt
12027 				 (TREE_BLOCK (*expr_p),
12028 				  EXPR_LOCATION (*expr_p)));
12029 	  ret = GS_ALL_DONE;
12030 	  *expr_p = NULL;
12031 	  break;
12032 
12033 	case SSA_NAME:
12034 	  /* Allow callbacks into the gimplifier during optimization.  */
12035 	  ret = GS_ALL_DONE;
12036 	  break;
12037 
12038 	case OMP_PARALLEL:
12039 	  gimplify_omp_parallel (expr_p, pre_p);
12040 	  ret = GS_ALL_DONE;
12041 	  break;
12042 
12043 	case OMP_TASK:
12044 	  gimplify_omp_task (expr_p, pre_p);
12045 	  ret = GS_ALL_DONE;
12046 	  break;
12047 
12048 	case OMP_FOR:
12049 	case OMP_SIMD:
12050 	case OMP_DISTRIBUTE:
12051 	case OMP_TASKLOOP:
12052 	case OACC_LOOP:
12053 	  ret = gimplify_omp_for (expr_p, pre_p);
12054 	  break;
12055 
12056 	case OACC_CACHE:
12057 	  gimplify_oacc_cache (expr_p, pre_p);
12058 	  ret = GS_ALL_DONE;
12059 	  break;
12060 
12061 	case OACC_DECLARE:
12062 	  gimplify_oacc_declare (expr_p, pre_p);
12063 	  ret = GS_ALL_DONE;
12064 	  break;
12065 
12066 	case OACC_HOST_DATA:
12067 	case OACC_DATA:
12068 	case OACC_KERNELS:
12069 	case OACC_PARALLEL:
12070 	case OMP_SECTIONS:
12071 	case OMP_SINGLE:
12072 	case OMP_TARGET:
12073 	case OMP_TARGET_DATA:
12074 	case OMP_TEAMS:
12075 	  gimplify_omp_workshare (expr_p, pre_p);
12076 	  ret = GS_ALL_DONE;
12077 	  break;
12078 
12079 	case OACC_ENTER_DATA:
12080 	case OACC_EXIT_DATA:
12081 	case OACC_UPDATE:
12082 	case OMP_TARGET_UPDATE:
12083 	case OMP_TARGET_ENTER_DATA:
12084 	case OMP_TARGET_EXIT_DATA:
12085 	  gimplify_omp_target_update (expr_p, pre_p);
12086 	  ret = GS_ALL_DONE;
12087 	  break;
12088 
12089 	case OMP_SECTION:
12090 	case OMP_MASTER:
12091 	case OMP_TASKGROUP:
12092 	case OMP_ORDERED:
12093 	case OMP_CRITICAL:
12094 	  {
12095 	    gimple_seq body = NULL;
12096 	    gimple *g;
12097 
12098 	    gimplify_and_add (OMP_BODY (*expr_p), &body);
12099 	    switch (TREE_CODE (*expr_p))
12100 	      {
12101 	      case OMP_SECTION:
12102 	        g = gimple_build_omp_section (body);
12103 	        break;
12104 	      case OMP_MASTER:
12105 	        g = gimple_build_omp_master (body);
12106 		break;
12107 	      case OMP_TASKGROUP:
12108 		{
12109 		  gimple_seq cleanup = NULL;
12110 		  tree fn
12111 		    = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
12112 		  g = gimple_build_call (fn, 0);
12113 		  gimple_seq_add_stmt (&cleanup, g);
12114 		  g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
12115 		  body = NULL;
12116 		  gimple_seq_add_stmt (&body, g);
12117 		  g = gimple_build_omp_taskgroup (body);
12118 		}
12119 		break;
12120 	      case OMP_ORDERED:
12121 		g = gimplify_omp_ordered (*expr_p, body);
12122 		break;
12123 	      case OMP_CRITICAL:
12124 		gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
12125 					   pre_p, ORT_WORKSHARE, OMP_CRITICAL);
12126 		gimplify_adjust_omp_clauses (pre_p, body,
12127 					     &OMP_CRITICAL_CLAUSES (*expr_p),
12128 					     OMP_CRITICAL);
12129 		g = gimple_build_omp_critical (body,
12130 		    			       OMP_CRITICAL_NAME (*expr_p),
12131 		    			       OMP_CRITICAL_CLAUSES (*expr_p));
12132 		break;
12133 	      default:
12134 		gcc_unreachable ();
12135 	      }
12136 	    gimplify_seq_add_stmt (pre_p, g);
12137 	    ret = GS_ALL_DONE;
12138 	    break;
12139 	  }
12140 
12141 	case OMP_ATOMIC:
12142 	case OMP_ATOMIC_READ:
12143 	case OMP_ATOMIC_CAPTURE_OLD:
12144 	case OMP_ATOMIC_CAPTURE_NEW:
12145 	  ret = gimplify_omp_atomic (expr_p, pre_p);
12146 	  break;
12147 
12148 	case TRANSACTION_EXPR:
12149 	  ret = gimplify_transaction (expr_p, pre_p);
12150 	  break;
12151 
12152 	case TRUTH_AND_EXPR:
12153 	case TRUTH_OR_EXPR:
12154 	case TRUTH_XOR_EXPR:
12155 	  {
12156 	    tree orig_type = TREE_TYPE (*expr_p);
12157 	    tree new_type, xop0, xop1;
12158 	    *expr_p = gimple_boolify (*expr_p);
12159 	    new_type = TREE_TYPE (*expr_p);
12160 	    if (!useless_type_conversion_p (orig_type, new_type))
12161 	      {
12162 		*expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
12163 		ret = GS_OK;
12164 		break;
12165 	      }
12166 
12167 	  /* Boolified binary truth expressions are semantically equivalent
12168 	     to bitwise binary expressions.  Canonicalize them to the
12169 	     bitwise variant.  */
12170 	    switch (TREE_CODE (*expr_p))
12171 	      {
12172 	      case TRUTH_AND_EXPR:
12173 		TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
12174 		break;
12175 	      case TRUTH_OR_EXPR:
12176 		TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
12177 		break;
12178 	      case TRUTH_XOR_EXPR:
12179 		TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
12180 		break;
12181 	      default:
12182 		break;
12183 	      }
12184 	    /* Now make sure that operands have compatible type to
12185 	       expression's new_type.  */
12186 	    xop0 = TREE_OPERAND (*expr_p, 0);
12187 	    xop1 = TREE_OPERAND (*expr_p, 1);
12188 	    if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
12189 	      TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
12190 							    new_type,
12191 	      						    xop0);
12192 	    if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
12193 	      TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
12194 							    new_type,
12195 	      						    xop1);
12196 	    /* Continue classified as tcc_binary.  */
12197 	    goto expr_2;
12198 	  }
12199 
12200 	case VEC_COND_EXPR:
12201 	  {
12202 	    enum gimplify_status r0, r1, r2;
12203 
12204 	    r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12205 				post_p, is_gimple_condexpr, fb_rvalue);
12206 	    r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
12207 				post_p, is_gimple_val, fb_rvalue);
12208 	    r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
12209 				post_p, is_gimple_val, fb_rvalue);
12210 
12211 	    ret = MIN (MIN (r0, r1), r2);
12212 	    recalculate_side_effects (*expr_p);
12213 	  }
12214 	  break;
12215 
12216 	case FMA_EXPR:
12217 	case VEC_PERM_EXPR:
12218 	  /* Classified as tcc_expression.  */
12219 	  goto expr_3;
12220 
12221 	case BIT_INSERT_EXPR:
12222 	  /* Argument 3 is a constant.  */
12223 	  goto expr_2;
12224 
12225 	case POINTER_PLUS_EXPR:
12226 	  {
12227 	    enum gimplify_status r0, r1;
12228 	    r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12229 				post_p, is_gimple_val, fb_rvalue);
12230 	    r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
12231 				post_p, is_gimple_val, fb_rvalue);
12232 	    recalculate_side_effects (*expr_p);
12233 	    ret = MIN (r0, r1);
12234 	    break;
12235 	  }
12236 
12237 	default:
12238 	  switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
12239 	    {
12240 	    case tcc_comparison:
12241 	      /* Handle comparison of objects of non scalar mode aggregates
12242 	     	 with a call to memcmp.  It would be nice to only have to do
12243 	     	 this for variable-sized objects, but then we'd have to allow
12244 	     	 the same nest of reference nodes we allow for MODIFY_EXPR and
12245 	     	 that's too complex.
12246 
12247 		 Compare scalar mode aggregates as scalar mode values.  Using
12248 		 memcmp for them would be very inefficient at best, and is
12249 		 plain wrong if bitfields are involved.  */
12250 		{
12251 		  tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
12252 
12253 		  /* Vector comparisons need no boolification.  */
12254 		  if (TREE_CODE (type) == VECTOR_TYPE)
12255 		    goto expr_2;
12256 		  else if (!AGGREGATE_TYPE_P (type))
12257 		    {
12258 		      tree org_type = TREE_TYPE (*expr_p);
12259 		      *expr_p = gimple_boolify (*expr_p);
12260 		      if (!useless_type_conversion_p (org_type,
12261 						      TREE_TYPE (*expr_p)))
12262 			{
12263 			  *expr_p = fold_convert_loc (input_location,
12264 						      org_type, *expr_p);
12265 			  ret = GS_OK;
12266 			}
12267 		      else
12268 			goto expr_2;
12269 		    }
12270 		  else if (TYPE_MODE (type) != BLKmode)
12271 		    ret = gimplify_scalar_mode_aggregate_compare (expr_p);
12272 		  else
12273 		    ret = gimplify_variable_sized_compare (expr_p);
12274 
12275 		  break;
12276 		}
12277 
12278 	    /* If *EXPR_P does not need to be special-cased, handle it
12279 	       according to its class.  */
12280 	    case tcc_unary:
12281 	      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12282 				   post_p, is_gimple_val, fb_rvalue);
12283 	      break;
12284 
12285 	    case tcc_binary:
12286 	    expr_2:
12287 	      {
12288 		enum gimplify_status r0, r1;
12289 
12290 		r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12291 		                    post_p, is_gimple_val, fb_rvalue);
12292 		r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
12293 				    post_p, is_gimple_val, fb_rvalue);
12294 
12295 		ret = MIN (r0, r1);
12296 		break;
12297 	      }
12298 
12299 	    expr_3:
12300 	      {
12301 		enum gimplify_status r0, r1, r2;
12302 
12303 		r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12304 		                    post_p, is_gimple_val, fb_rvalue);
12305 		r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
12306 				    post_p, is_gimple_val, fb_rvalue);
12307 		r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
12308 				    post_p, is_gimple_val, fb_rvalue);
12309 
12310 		ret = MIN (MIN (r0, r1), r2);
12311 		break;
12312 	      }
12313 
12314 	    case tcc_declaration:
12315 	    case tcc_constant:
12316 	      ret = GS_ALL_DONE;
12317 	      goto dont_recalculate;
12318 
12319 	    default:
12320 	      gcc_unreachable ();
12321 	    }
12322 
12323 	  recalculate_side_effects (*expr_p);
12324 
12325 	dont_recalculate:
12326 	  break;
12327 	}
12328 
12329       gcc_assert (*expr_p || ret != GS_OK);
12330     }
12331   while (ret == GS_OK);
12332 
12333   /* If we encountered an error_mark somewhere nested inside, either
12334      stub out the statement or propagate the error back out.  */
12335   if (ret == GS_ERROR)
12336     {
12337       if (is_statement)
12338 	*expr_p = NULL;
12339       goto out;
12340     }
12341 
12342   /* This was only valid as a return value from the langhook, which
12343      we handled.  Make sure it doesn't escape from any other context.  */
12344   gcc_assert (ret != GS_UNHANDLED);
12345 
12346   if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
12347     {
12348       /* We aren't looking for a value, and we don't have a valid
12349 	 statement.  If it doesn't have side-effects, throw it away.
12350 	 We can also get here with code such as "*&&L;", where L is
12351 	 a LABEL_DECL that is marked as FORCED_LABEL.  */
12352       if (TREE_CODE (*expr_p) == LABEL_DECL
12353 	  || !TREE_SIDE_EFFECTS (*expr_p))
12354 	*expr_p = NULL;
12355       else if (!TREE_THIS_VOLATILE (*expr_p))
12356 	{
12357 	  /* This is probably a _REF that contains something nested that
12358 	     has side effects.  Recurse through the operands to find it.  */
12359 	  enum tree_code code = TREE_CODE (*expr_p);
12360 
12361 	  switch (code)
12362 	    {
12363 	    case COMPONENT_REF:
12364 	    case REALPART_EXPR:
12365 	    case IMAGPART_EXPR:
12366 	    case VIEW_CONVERT_EXPR:
12367 	      gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12368 			     gimple_test_f, fallback);
12369 	      break;
12370 
12371 	    case ARRAY_REF:
12372 	    case ARRAY_RANGE_REF:
12373 	      gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12374 			     gimple_test_f, fallback);
12375 	      gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
12376 			     gimple_test_f, fallback);
12377 	      break;
12378 
12379 	    default:
12380 	       /* Anything else with side-effects must be converted to
12381 		  a valid statement before we get here.  */
12382 	      gcc_unreachable ();
12383 	    }
12384 
12385 	  *expr_p = NULL;
12386 	}
12387       else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
12388 	       && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
12389 	{
12390 	  /* Historically, the compiler has treated a bare reference
12391 	     to a non-BLKmode volatile lvalue as forcing a load.  */
12392 	  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
12393 
12394 	  /* Normally, we do not want to create a temporary for a
12395 	     TREE_ADDRESSABLE type because such a type should not be
12396 	     copied by bitwise-assignment.  However, we make an
12397 	     exception here, as all we are doing here is ensuring that
12398 	     we read the bytes that make up the type.  We use
12399 	     create_tmp_var_raw because create_tmp_var will abort when
12400 	     given a TREE_ADDRESSABLE type.  */
12401 	  tree tmp = create_tmp_var_raw (type, "vol");
12402 	  gimple_add_tmp_var (tmp);
12403 	  gimplify_assign (tmp, *expr_p, pre_p);
12404 	  *expr_p = NULL;
12405 	}
12406       else
12407 	/* We can't do anything useful with a volatile reference to
12408 	   an incomplete type, so just throw it away.  Likewise for
12409 	   a BLKmode type, since any implicit inner load should
12410 	   already have been turned into an explicit one by the
12411 	   gimplification process.  */
12412 	*expr_p = NULL;
12413     }
12414 
12415   /* If we are gimplifying at the statement level, we're done.  Tack
12416      everything together and return.  */
12417   if (fallback == fb_none || is_statement)
12418     {
12419       /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
12420          it out for GC to reclaim it.  */
12421       *expr_p = NULL_TREE;
12422 
12423       if (!gimple_seq_empty_p (internal_pre)
12424 	  || !gimple_seq_empty_p (internal_post))
12425 	{
12426 	  gimplify_seq_add_seq (&internal_pre, internal_post);
12427 	  gimplify_seq_add_seq (pre_p, internal_pre);
12428 	}
12429 
12430       /* The result of gimplifying *EXPR_P is going to be the last few
12431 	 statements in *PRE_P and *POST_P.  Add location information
12432 	 to all the statements that were added by the gimplification
12433 	 helpers.  */
12434       if (!gimple_seq_empty_p (*pre_p))
12435 	annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
12436 
12437       if (!gimple_seq_empty_p (*post_p))
12438 	annotate_all_with_location_after (*post_p, post_last_gsi,
12439 					  input_location);
12440 
12441       goto out;
12442     }
12443 
12444 #ifdef ENABLE_GIMPLE_CHECKING
12445   if (*expr_p)
12446     {
12447       enum tree_code code = TREE_CODE (*expr_p);
12448       /* These expressions should already be in gimple IR form.  */
12449       gcc_assert (code != MODIFY_EXPR
12450 		  && code != ASM_EXPR
12451 		  && code != BIND_EXPR
12452 		  && code != CATCH_EXPR
12453 		  && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
12454 		  && code != EH_FILTER_EXPR
12455 		  && code != GOTO_EXPR
12456 		  && code != LABEL_EXPR
12457 		  && code != LOOP_EXPR
12458 		  && code != SWITCH_EXPR
12459 		  && code != TRY_FINALLY_EXPR
12460 		  && code != OACC_PARALLEL
12461 		  && code != OACC_KERNELS
12462 		  && code != OACC_DATA
12463 		  && code != OACC_HOST_DATA
12464 		  && code != OACC_DECLARE
12465 		  && code != OACC_UPDATE
12466 		  && code != OACC_ENTER_DATA
12467 		  && code != OACC_EXIT_DATA
12468 		  && code != OACC_CACHE
12469 		  && code != OMP_CRITICAL
12470 		  && code != OMP_FOR
12471 		  && code != OACC_LOOP
12472 		  && code != OMP_MASTER
12473 		  && code != OMP_TASKGROUP
12474 		  && code != OMP_ORDERED
12475 		  && code != OMP_PARALLEL
12476 		  && code != OMP_SECTIONS
12477 		  && code != OMP_SECTION
12478 		  && code != OMP_SINGLE);
12479     }
12480 #endif
12481 
12482   /* Otherwise we're gimplifying a subexpression, so the resulting
12483      value is interesting.  If it's a valid operand that matches
12484      GIMPLE_TEST_F, we're done. Unless we are handling some
12485      post-effects internally; if that's the case, we need to copy into
12486      a temporary before adding the post-effects to POST_P.  */
12487   if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
12488     goto out;
12489 
12490   /* Otherwise, we need to create a new temporary for the gimplified
12491      expression.  */
12492 
12493   /* We can't return an lvalue if we have an internal postqueue.  The
12494      object the lvalue refers to would (probably) be modified by the
12495      postqueue; we need to copy the value out first, which means an
12496      rvalue.  */
12497   if ((fallback & fb_lvalue)
12498       && gimple_seq_empty_p (internal_post)
12499       && is_gimple_addressable (*expr_p))
12500     {
12501       /* An lvalue will do.  Take the address of the expression, store it
12502 	 in a temporary, and replace the expression with an INDIRECT_REF of
12503 	 that temporary.  */
12504       tmp = build_fold_addr_expr_loc (input_location, *expr_p);
12505       gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
12506       *expr_p = build_simple_mem_ref (tmp);
12507     }
12508   else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
12509     {
12510       /* An rvalue will do.  Assign the gimplified expression into a
12511 	 new temporary TMP and replace the original expression with
12512 	 TMP.  First, make sure that the expression has a type so that
12513 	 it can be assigned into a temporary.  */
12514       gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
12515       *expr_p = get_formal_tmp_var (*expr_p, pre_p);
12516     }
12517   else
12518     {
12519 #ifdef ENABLE_GIMPLE_CHECKING
12520       if (!(fallback & fb_mayfail))
12521 	{
12522 	  fprintf (stderr, "gimplification failed:\n");
12523 	  print_generic_expr (stderr, *expr_p);
12524 	  debug_tree (*expr_p);
12525 	  internal_error ("gimplification failed");
12526 	}
12527 #endif
12528       gcc_assert (fallback & fb_mayfail);
12529 
12530       /* If this is an asm statement, and the user asked for the
12531 	 impossible, don't die.  Fail and let gimplify_asm_expr
12532 	 issue an error.  */
12533       ret = GS_ERROR;
12534       goto out;
12535     }
12536 
12537   /* Make sure the temporary matches our predicate.  */
12538   gcc_assert ((*gimple_test_f) (*expr_p));
12539 
12540   if (!gimple_seq_empty_p (internal_post))
12541     {
12542       annotate_all_with_location (internal_post, input_location);
12543       gimplify_seq_add_seq (pre_p, internal_post);
12544     }
12545 
12546  out:
12547   input_location = saved_location;
12548   return ret;
12549 }
12550 
12551 /* Like gimplify_expr but make sure the gimplified result is not itself
12552    a SSA name (but a decl if it were).  Temporaries required by
12553    evaluating *EXPR_P may be still SSA names.  */
12554 
12555 static enum gimplify_status
12556 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
12557 	       bool (*gimple_test_f) (tree), fallback_t fallback,
12558 	       bool allow_ssa)
12559 {
12560   bool was_ssa_name_p = TREE_CODE (*expr_p) == SSA_NAME;
12561   enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
12562 					    gimple_test_f, fallback);
12563   if (! allow_ssa
12564       && TREE_CODE (*expr_p) == SSA_NAME)
12565     {
12566       tree name = *expr_p;
12567       if (was_ssa_name_p)
12568 	*expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
12569       else
12570 	{
12571 	  /* Avoid the extra copy if possible.  */
12572 	  *expr_p = create_tmp_reg (TREE_TYPE (name));
12573 	  if (!gimple_nop_p (SSA_NAME_DEF_STMT (name)))
12574 	    gimple_set_lhs (SSA_NAME_DEF_STMT (name), *expr_p);
12575 	  release_ssa_name (name);
12576 	}
12577     }
12578   return ret;
12579 }
12580 
12581 /* Look through TYPE for variable-sized objects and gimplify each such
12582    size that we find.  Add to LIST_P any statements generated.  */
12583 
void
gimplify_type_sizes (tree type, gimple_seq *list_p)
{
  tree field, t;

  if (type == NULL || type == error_mark_node)
    return;

  /* We first do the main variant, then copy into any other variants.  */
  type = TYPE_MAIN_VARIANT (type);

  /* Avoid infinite recursion.  */
  if (TYPE_SIZES_GIMPLIFIED (type))
    return;

  TYPE_SIZES_GIMPLIFIED (type) = 1;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      /* Scalar types: gimplify the (possibly variable) bounds, then
	 share the resulting trees with every variant of the type.  */
      gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
      gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);

      for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
	{
	  TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
	  TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
	}
      break;

    case ARRAY_TYPE:
      /* These types may not have declarations, so handle them here.  */
      gimplify_type_sizes (TREE_TYPE (type), list_p);
      gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
      /* Ensure VLA bounds aren't removed, for -O0 they should be variables
	 with assigned stack slots, for -O1+ -g they should be tracked
	 by VTA.  */
      if (!(TYPE_NAME (type)
	    && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
	    && DECL_IGNORED_P (TYPE_NAME (type)))
	  && TYPE_DOMAIN (type)
	  && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
	{
	  /* Clear DECL_IGNORED_P on artificial bound variables so the
	     debugger can still see VLA bounds.  */
	  t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	  if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
	    DECL_IGNORED_P (t) = 0;
	  t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	  if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
	    DECL_IGNORED_P (t) = 0;
	}
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* Aggregates: each field's offset and size may be variable, and
	 each field's type may itself contain variable sizes.  */
      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
	    gimplify_one_sizepos (&DECL_SIZE (field), list_p);
	    gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
	    gimplify_type_sizes (TREE_TYPE (field), list_p);
	  }
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
	/* We used to recurse on the pointed-to type here, which turned out to
	   be incorrect because its definition might refer to variables not
	   yet initialized at this point if a forward declaration is involved.

	   It was actually useful for anonymous pointed-to types to ensure
	   that the sizes evaluation dominates every possible later use of the
	   values.  Restricting to such types here would be safe since there
	   is no possible forward declaration around, but would introduce an
	   undesirable middle-end semantic to anonymity.  We then defer to
	   front-ends the responsibility of ensuring that the sizes are
	   evaluated both early and late enough, e.g. by attaching artificial
	   type declarations to the tree.  */
      break;

    default:
      break;
    }

  /* Finally gimplify the type's own size/size-in-bytes and propagate
     them (and the gimplified flag) to all variants.  */
  gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
  gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);

  for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
    {
      TYPE_SIZE (t) = TYPE_SIZE (type);
      TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
      TYPE_SIZES_GIMPLIFIED (t) = 1;
    }
}
12683 
12684 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
12685    a size or position, has had all of its SAVE_EXPRs evaluated.
12686    We add any required statements to *STMT_P.  */
12687 
12688 void
12689 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
12690 {
12691   tree expr = *expr_p;
12692 
12693   /* We don't do anything if the value isn't there, is constant, or contains
12694      A PLACEHOLDER_EXPR.  We also don't want to do anything if it's already
12695      a VAR_DECL.  If it's a VAR_DECL from another function, the gimplifier
12696      will want to replace it with a new variable, but that will cause problems
12697      if this type is from outside the function.  It's OK to have that here.  */
12698   if (expr == NULL_TREE
12699       || is_gimple_constant (expr)
12700       || TREE_CODE (expr) == VAR_DECL
12701       || CONTAINS_PLACEHOLDER_P (expr))
12702     return;
12703 
12704   *expr_p = unshare_expr (expr);
12705 
12706   /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
12707      if the def vanishes.  */
12708   gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
12709 
12710   /* If expr wasn't already is_gimple_sizepos or is_gimple_constant from the
12711      FE, ensure that it is a VAR_DECL, otherwise we might handle some decls
12712      as gimplify_vla_decl even when they would have all sizes INTEGER_CSTs.  */
12713   if (is_gimple_constant (*expr_p))
12714     *expr_p = get_initialized_tmp_var (*expr_p, stmt_p, NULL, false);
12715 }
12716 
12717 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
12718    containing the sequence of corresponding GIMPLE statements.  If DO_PARMS
12719    is true, also gimplify the parameters.  */
12720 
gbind *
gimplify_body (tree fndecl, bool do_parms)
{
  location_t saved_location = input_location;
  gimple_seq parm_stmts, parm_cleanup = NULL, seq;
  gimple *outer_stmt;
  gbind *outer_bind;
  struct cgraph_node *cgn;

  timevar_push (TV_TREE_GIMPLIFY);

  init_tree_ssa (cfun);

  /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
     gimplification.  */
  default_rtl_profile ();

  /* Establish a fresh gimplification context; it is torn down via
     pop_gimplify_context near the end of this function.  */
  gcc_assert (gimplify_ctxp == NULL);
  push_gimplify_context (true);

  if (flag_openacc || flag_openmp)
    {
      gcc_assert (gimplify_omp_ctxp == NULL);
      if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
	gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
    }

  /* Unshare most shared trees in the body and in that of any nested functions.
     It would seem we don't have to do this for nested functions because
     they are supposed to be output and then the outer function gimplified
     first, but the g++ front end doesn't always do it that way.  */
  unshare_body (fndecl);
  unvisit_body (fndecl);

  /* For nested functions (cgn->origin non-null), set up the global
     nonlocal_vlas set; it is consumed in the nonlocal_vla_vars handling
     further down.  */
  cgn = cgraph_node::get (fndecl);
  if (cgn && cgn->origin)
    nonlocal_vlas = new hash_set<tree>;

  /* Make sure input_location isn't set to something weird.  */
  input_location = DECL_SOURCE_LOCATION (fndecl);

  /* Resolve callee-copies.  This has to be done before processing
     the body so that DECL_VALUE_EXPR gets processed correctly.  */
  parm_stmts = do_parms ? gimplify_parameters (&parm_cleanup) : NULL;

  /* Gimplify the function's body.  */
  seq = NULL;
  gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
  outer_stmt = gimple_seq_first_stmt (seq);
  if (!outer_stmt)
    {
      /* An empty body still needs one statement to hang the bind on.  */
      outer_stmt = gimple_build_nop ();
      gimplify_seq_add_stmt (&seq, outer_stmt);
    }

  /* The body must contain exactly one statement, a GIMPLE_BIND.  If this is
     not the case, wrap everything in a GIMPLE_BIND to make it so.  */
  if (gimple_code (outer_stmt) == GIMPLE_BIND
      && gimple_seq_first (seq) == gimple_seq_last (seq))
    outer_bind = as_a <gbind *> (outer_stmt);
  else
    outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);

  DECL_SAVED_TREE (fndecl) = NULL_TREE;

  /* If we had callee-copies statements, insert them at the beginning
     of the function and clear DECL_VALUE_EXPR_P on the parameters.  */
  if (!gimple_seq_empty_p (parm_stmts))
    {
      tree parm;

      gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
      if (parm_cleanup)
	{
	  /* Wrap the body in try/finally so the parameter cleanups run
	     on every exit path.  */
	  gtry *g = gimple_build_try (parm_stmts, parm_cleanup,
				      GIMPLE_TRY_FINALLY);
	  parm_stmts = NULL;
	  gimple_seq_add_stmt (&parm_stmts, g);
	}
      gimple_bind_set_body (outer_bind, parm_stmts);

      for (parm = DECL_ARGUMENTS (current_function_decl);
	   parm; parm = DECL_CHAIN (parm))
	if (DECL_HAS_VALUE_EXPR_P (parm))
	  {
	    DECL_HAS_VALUE_EXPR_P (parm) = 0;
	    DECL_IGNORED_P (parm) = 0;
	  }
    }

  /* Flush the VLA bookkeeping set up above for nested functions.  */
  if (nonlocal_vlas)
    {
      if (nonlocal_vla_vars)
	{
	  /* tree-nested.c may later on call declare_vars (..., true);
	     which relies on BLOCK_VARS chain to be the tail of the
	     gimple_bind_vars chain.  Ensure we don't violate that
	     assumption.  */
	  if (gimple_bind_block (outer_bind)
	      == DECL_INITIAL (current_function_decl))
	    declare_vars (nonlocal_vla_vars, outer_bind, true);
	  else
	    BLOCK_VARS (DECL_INITIAL (current_function_decl))
	      = chainon (BLOCK_VARS (DECL_INITIAL (current_function_decl)),
			 nonlocal_vla_vars);
	  nonlocal_vla_vars = NULL_TREE;
	}
      delete nonlocal_vlas;
      nonlocal_vlas = NULL;
    }

  if ((flag_openacc || flag_openmp || flag_openmp_simd)
      && gimplify_omp_ctxp)
    {
      delete_omp_context (gimplify_omp_ctxp);
      gimplify_omp_ctxp = NULL;
    }

  pop_gimplify_context (outer_bind);
  gcc_assert (gimplify_ctxp == NULL);

  if (flag_checking && !seen_error ())
    verify_gimple_in_seq (gimple_bind_body (outer_bind));

  timevar_pop (TV_TREE_GIMPLIFY);
  input_location = saved_location;

  return outer_bind;
}
12850 
12851 typedef char *char_p; /* For DEF_VEC_P.  */
12852 
12853 /* Return whether we should exclude FNDECL from instrumentation.  */
12854 
12855 static bool
12856 flag_instrument_functions_exclude_p (tree fndecl)
12857 {
12858   vec<char_p> *v;
12859 
12860   v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
12861   if (v && v->length () > 0)
12862     {
12863       const char *name;
12864       int i;
12865       char *s;
12866 
12867       name = lang_hooks.decl_printable_name (fndecl, 0);
12868       FOR_EACH_VEC_ELT (*v, i, s)
12869 	if (strstr (name, s) != NULL)
12870 	  return true;
12871     }
12872 
12873   v = (vec<char_p> *) flag_instrument_functions_exclude_files;
12874   if (v && v->length () > 0)
12875     {
12876       const char *name;
12877       int i;
12878       char *s;
12879 
12880       name = DECL_SOURCE_FILE (fndecl);
12881       FOR_EACH_VEC_ELT (*v, i, s)
12882 	if (strstr (name, s) != NULL)
12883 	  return true;
12884     }
12885 
12886   return false;
12887 }
12888 
12889 /* Entry point to the gimplification pass.  FNDECL is the FUNCTION_DECL
12890    node for the function we want to gimplify.
12891 
12892    Return the sequence of GIMPLE statements corresponding to the body
12893    of FNDECL.  */
12894 
void
gimplify_function_tree (tree fndecl)
{
  tree parm, ret;
  gimple_seq seq;
  gbind *bind;

  gcc_assert (!gimple_body (fndecl));

  if (DECL_STRUCT_FUNCTION (fndecl))
    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
  else
    push_struct_function (fndecl);

  /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
     if necessary.  */
  cfun->curr_properties |= PROP_gimple_lva;

  for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
    {
      /* Preliminarily mark non-addressed complex variables as eligible
         for promotion to gimple registers.  We'll transform their uses
         as we find them.  */
      if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
          && !TREE_THIS_VOLATILE (parm)
          && !needs_to_live_in_memory (parm))
        DECL_GIMPLE_REG_P (parm) = 1;
    }

  /* Likewise for the DECL_RESULT, if it is complex/vector and need not
     live in memory.  */
  ret = DECL_RESULT (fndecl);
  if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
      && !needs_to_live_in_memory (ret))
    DECL_GIMPLE_REG_P (ret) = 1;

  /* ASan use-after-scope tracking: the set is populated during
     gimplify_body and torn down right after.  */
  if (asan_sanitize_use_after_scope () && sanitize_flags_p (SANITIZE_ADDRESS))
    asan_poisoned_variables = new hash_set<tree> ();
  bind = gimplify_body (fndecl, true);
  if (asan_poisoned_variables)
    {
      delete asan_poisoned_variables;
      asan_poisoned_variables = NULL;
    }

  /* The tree body of the function is no longer needed, replace it
     with the new GIMPLE body.  */
  seq = NULL;
  gimple_seq_add_stmt (&seq, bind);
  gimple_set_body (fndecl, seq);

  /* If we're instrumenting function entry/exit, then prepend the call to
     the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
     catch the exit hook.  */
  /* ??? Add some way to ignore exceptions for this TFE.  */
  if (flag_instrument_function_entry_exit
      && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
      /* Do not instrument extern inline functions.  */
      && !(DECL_DECLARED_INLINE_P (fndecl)
	   && DECL_EXTERNAL (fndecl)
	   && DECL_DISREGARD_INLINE_LIMITS (fndecl))
      && !flag_instrument_functions_exclude_p (fndecl))
    {
      tree x;
      gbind *new_bind;
      gimple *tf;
      gimple_seq cleanup = NULL, body = NULL;
      tree tmp_var;
      gcall *call;

      /* Build the finally clause: __builtin_profile_func_exit
	 (this_fn, __builtin_return_address (0)).  */
      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
      call = gimple_build_call (x, 2,
				build_fold_addr_expr (current_function_decl),
				tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);

      /* Build the prologue: __builtin_profile_func_enter
	 (this_fn, __builtin_return_address (0)), followed by the
	 try/finally wrapping the original body.  */
      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&body, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
      call = gimple_build_call (x, 2,
				build_fold_addr_expr (current_function_decl),
				tmp_var);
      gimplify_seq_add_stmt (&body, call);
      gimplify_seq_add_stmt (&body, tf);
      new_bind = gimple_build_bind (NULL, body, NULL);

      /* Replace the current function body with the body
         wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
      bind = new_bind;
    }

  /* For -fsanitize=thread, similarly wrap the body so IFN_TSAN_FUNC_EXIT
     runs on every exit path.  */
  if (sanitize_flags_p (SANITIZE_THREAD))
    {
      gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
      gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
      gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
      /* Replace the current function body with the body
	 wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
    }

  DECL_SAVED_TREE (fndecl) = NULL_TREE;
  cfun->curr_properties |= PROP_gimple_any;

  pop_cfun ();

  dump_function (TDI_gimple, fndecl);
}
13017 
13018 /* Return a dummy expression of type TYPE in order to keep going after an
13019    error.  */
13020 
13021 static tree
13022 dummy_object (tree type)
13023 {
13024   tree t = build_int_cst (build_pointer_type (type), 0);
13025   return build2 (MEM_REF, type, t, t);
13026 }
13027 
13028 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
13029    builtin function, but a very special sort of operator.  */
13030 
enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
		      gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t, tag, aptag;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);
  if (have_va_type == NULL_TREE
      && POINTER_TYPE_P (TREE_TYPE (valist)))
    /* Handle 'Case 1: Not an array type' from c-common.c/build_va_arg.  */
    have_va_type
      = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
  gcc_assert (have_va_type != NULL_TREE);

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
	   != type)
    {
      static bool gave_help;
      bool warned;
      /* Use the expansion point to handle cases such as passing bool (defined
	 in a system header) through `...'.  */
      source_location xloc
	= expansion_point_location_if_in_system_header (loc);

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      warned = warning_at (xloc, 0,
			   "%qT is promoted to %qT when passed through %<...%>",
			   type, promoted_type);
      /* The help note is emitted at most once per compilation
	 (static GAVE_HELP), and only alongside an actual warning.  */
      if (!gave_help && warned)
	{
	  gave_help = true;
	  inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      if (warned)
	inform (xloc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
	 expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
			       builtin_decl_implicit (BUILT_IN_TRAP), 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }

  /* Normal case: lower to the IFN_VA_ARG internal function, carrying the
     result type and the va_list type as (TYPE *)0 / (APTYPE)0 tags.  */
  tag = build_int_cst (build_pointer_type (type), 0);
  aptag = build_int_cst (TREE_TYPE (valist), 0);

  *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
					  valist, tag, aptag);

  /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
     needs to be expanded.  */
  cfun->curr_properties &= ~PROP_gimple_lva;

  return GS_OK;
}
13107 
13108 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
13109 
13110    DST/SRC are the destination and source respectively.  You can pass
13111    ungimplified trees in DST or SRC, in which case they will be
13112    converted to a gimple operand if necessary.
13113 
13114    This function returns the newly created GIMPLE_ASSIGN tuple.  */
13115 
13116 gimple *
13117 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
13118 {
13119   tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
13120   gimplify_and_add (t, seq_p);
13121   ggc_free (t);
13122   return gimple_seq_last_stmt (*seq_p);
13123 }
13124 
13125 inline hashval_t
13126 gimplify_hasher::hash (const elt_t *p)
13127 {
13128   tree t = p->val;
13129   return iterative_hash_expr (t, 0);
13130 }
13131 
13132 inline bool
13133 gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
13134 {
13135   tree t1 = p1->val;
13136   tree t2 = p2->val;
13137   enum tree_code code = TREE_CODE (t1);
13138 
13139   if (TREE_CODE (t2) != code
13140       || TREE_TYPE (t1) != TREE_TYPE (t2))
13141     return false;
13142 
13143   if (!operand_equal_p (t1, t2, 0))
13144     return false;
13145 
13146   /* Only allow them to compare equal if they also hash equal; otherwise
13147      results are nondeterminate, and we fail bootstrap comparison.  */
13148   gcc_checking_assert (hash (p1) == hash (p2));
13149 
13150   return true;
13151 }
13152