/* Lowering pass for OMP directives.  Converts OMP directives into explicit
   calls to the runtime library (libgomp), data marshalling to implement data
   sharing and copying clauses, offloading to accelerators, and more.

   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "splay-tree.h"
#include "omp-general.h"
#include "omp-low.h"
#include "omp-grid.h"
#include "gimple-low.h"
#include "symbol-summary.h"
#include "tree-nested.h"
#include "context.h"
#include "gomp-constants.h"
#include "gimple-pretty-print.h"
#include "hsa-common.h"

/* Lowering of OMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for regions which are then moved to a new
   function, to be invoked by the thread library, or offloaded.  */
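/* As a rough sketch (the exact libgomp entry points and temporaries
   vary with the clauses used and the target configuration), a construct
   such as

       #pragma omp parallel shared(x)
       x++;

   is outlined into a child function taking a single ".omp_data_i"
   argument, with x communicated through a record:

       struct .omp_data_s { int *x; } .omp_data_o;
       .omp_data_o.x = &x;
       GOMP_parallel (foo._omp_fn.0, &.omp_data_o, 0, 0);  */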

/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if the task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to the task firstprivate fn;
     record_type is allocated by GOMP_task, initialized by the task
     firstprivate fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,lation_point} and explicit and implicit
     barriers should jump during the omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* What to do with variables with implicitly determined sharing
     attributes.  */
  enum omp_clause_default_kind default_kind;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;
};

static splay_tree all_contexts;
static int taskreg_nesting_level;
static int target_nesting_level;
static bitmap task_shared_vars;
static vec<omp_context *> taskreg_contexts;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);

#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;

/* Return true if CTX corresponds to an oacc parallel region.  */

static bool
is_oacc_parallel (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_PARALLEL));
}

/* Return true if CTX corresponds to an oacc kernels region.  */

static bool
is_oacc_kernels (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_KERNELS));
}

/* If DECL is the artificial dummy VAR_DECL created for non-static
   data member privatization, return the underlying "this" parameter,
   otherwise return NULL.  */
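/* A sketch of the intended case (hypothetical C++ source): for

       struct S {
	 int n;
	 void f () { ... #pragma omp parallel private (n) ... }
       };

   the front end creates an artificial VAR_DECL for N whose
   DECL_VALUE_EXPR is essentially this->n; the walk below digs the
   artificial "this" PARM_DECL out from under the COMPONENT_REF.  */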

tree
omp_member_access_dummy_var (tree decl)
{
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
      case MEM_REF:
      case INDIRECT_REF:
      CASE_CONVERT:
      case POINTER_PLUS_EXPR:
	v = TREE_OPERAND (v, 0);
	continue;
      case PARM_DECL:
	if (DECL_CONTEXT (v) == current_function_decl
	    && DECL_ARTIFICIAL (v)
	    && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
	  return v;
	return NULL_TREE;
      default:
	return NULL_TREE;
      }
}

/* Helper for unshare_and_remap, called through walk_tree.  */

static tree
unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
{
  tree *pair = (tree *) data;
  if (*tp == pair[0])
    {
      *tp = unshare_expr (pair[1]);
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Return unshare_expr (X) with all occurrences of FROM
   replaced with TO.  */

static tree
unshare_and_remap (tree x, tree from, tree to)
{
  tree pair[2] = { from, to };
  x = unshare_expr (x);
  walk_tree (&x, unshare_and_remap_1, pair, NULL);
  return x;
}

/* Convenience function for calling scan_omp_1_op on tree operands.  */

static inline tree
scan_omp_op (tree *tp, omp_context *ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  return walk_tree (tp, scan_omp_1_op, &wi, NULL);
}

static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);

/* Return true if CTX is for an omp parallel.  */

static inline bool
is_parallel_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
}


/* Return true if CTX is for an omp task.  */

static inline bool
is_task_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}


/* Return true if CTX is for an omp taskloop.  */

static inline bool
is_taskloop_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
}


/* Return true if CTX is for an omp parallel or omp task.  */

static inline bool
is_taskreg_ctx (omp_context *ctx)
{
  return is_parallel_ctx (ctx) || is_task_ctx (ctx);
}

/* Return true if EXPR is variable sized.  */

static inline bool
is_variable_sized (const_tree expr)
{
  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
}

/* Look up variables.  The "maybe" form
   allows the variable not to have been entered; otherwise we
   assert that the variable must have been entered.  */

static inline tree
lookup_decl (tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (var);
  return *n;
}

static inline tree
maybe_lookup_decl (const_tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
  return n ? *n : NULL_TREE;
}

static inline tree
lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
lookup_sfield (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->sfield_map
			 ? ctx->sfield_map : ctx->field_map, key);
  return (tree) n->value;
}

static inline tree
lookup_sfield (tree var, omp_context *ctx)
{
  return lookup_sfield ((splay_tree_key) var, ctx);
}

static inline tree
maybe_lookup_field (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, key);
  return n ? (tree) n->value : NULL_TREE;
}

static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
  return maybe_lookup_field ((splay_tree_key) var, ctx);
}

/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  */
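/* E.g. (a sketch): a small non-addressable scalar that is provably not
   visible to other threads can be copied into the child's record and
   back out ("copy-in/copy-out"), while an aggregate, an atomic or
   addressable variable, or one shared again by an outer parallel must
   instead be passed through a pointer field so all threads see the one
   object.  */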

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (TREE_ADDRESSABLE (decl))
	return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
	 for these.  */
      if (TREE_READONLY (decl)
	  || ((TREE_CODE (decl) == RESULT_DECL
	       || TREE_CODE (decl) == PARM_DECL)
	      && DECL_BY_REFERENCE (decl)))
	return false;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (shared_ctx->is_nested)
	{
	  omp_context *up;

	  for (up = shared_ctx->outer; up; up = up->outer)
	    if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      for (c = gimple_omp_taskreg_clauses (up->stmt);
		   c; c = OMP_CLAUSE_CHAIN (c))
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		    && OMP_CLAUSE_DECL (c) == decl)
		  break;

	      if (c)
		goto maybe_mark_addressable_and_ret;
	    }
	}

      /* For tasks avoid using copy-in/out.  As tasks can be
	 deferred or executed in a different thread, the task hasn't
	 necessarily terminated when GOMP_task returns.  */
      if (is_task_ctx (shared_ctx))
	{
	  tree outer;
	maybe_mark_addressable_and_ret:
	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!task_shared_vars)
		task_shared_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (task_shared_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}

/* Construct a new automatic decl similar to VAR.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;
  DECL_CHAIN (copy) = ctx->block_vars;
  /* If VAR is listed in task_shared_vars, it means it wasn't
     originally addressable and is addressable only because a task
     needs to take its address.  But we don't need to take the address
     of privatizations from that var.  */
  if (TREE_ADDRESSABLE (var)
      && task_shared_vars
      && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
    TREE_ADDRESSABLE (copy) = 0;
  ctx->block_vars = copy;

  return copy;
}

static tree
omp_copy_decl_1 (tree var, omp_context *ctx)
{
  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
}

/* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
   as appropriate.  */
static tree
omp_build_component_ref (tree obj, tree field)
{
  tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
  if (TREE_THIS_VOLATILE (field))
    TREE_THIS_VOLATILE (ret) |= 1;
  if (TREE_READONLY (field))
    TREE_READONLY (ret) |= 1;
  return ret;
}

/* Build tree nodes to access the field for VAR on the receiver side.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_simple_mem_ref (ctx->receiver_decl);
  TREE_THIS_NOTRAP (x) = 1;
  x = omp_build_component_ref (x, field);
  if (by_ref)
    {
      x = build_simple_mem_ref (x);
      TREE_THIS_NOTRAP (x) = 1;
    }

  return x;
}

/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
		     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	    && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
	   || (code == OMP_CLAUSE_PRIVATE
	       && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
	 even private vars in its linear etc. clauses.
	 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
	 to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (ctx->outer && is_taskreg_ctx (ctx))
	x = lookup_decl (var, ctx->outer);
      else if (ctx->outer)
	x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
	x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (ctx->outer);
      splay_tree_node n
	= splay_tree_lookup (ctx->outer->field_map,
			     (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
	{
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx->outer)))
	    x = var;
	  else
	    x = lookup_decl (var, ctx->outer);
	}
      else
	{
	  tree field = (tree) n->value;
	  /* If the receiver record type was remapped in the child function,
	     remap the field into the new record type.  */
	  x = maybe_lookup_field (field, ctx->outer);
	  if (x != NULL)
	    field = x;

	  x = build_simple_mem_ref (ctx->outer->receiver_decl);
	  x = omp_build_component_ref (x, field);
	  if (use_pointer_for_field (var, ctx->outer))
	    x = build_simple_mem_ref (x);
	}
    }
  else if (ctx->outer)
    {
      omp_context *outer = ctx->outer;
      if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
	{
	  outer = outer->outer;
	  gcc_assert (outer
		      && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
	}
      x = lookup_decl (var, outer);
    }
  else if (omp_is_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      tree t = omp_member_access_dummy_var (var);
      if (t)
	{
	  x = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    x = unshare_and_remap (x, t, o);
	  else
	    x = unshare_expr (x);
	}
    }

  if (omp_is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}

/* Build tree nodes to access the field for VAR on the sender side.  */

static tree
build_sender_ref (splay_tree_key key, omp_context *ctx)
{
  tree field = lookup_sfield (key, ctx);
  return omp_build_component_ref (ctx->sender_decl, field);
}

static tree
build_sender_ref (tree var, omp_context *ctx)
{
  return build_sender_ref ((splay_tree_key) var, ctx);
}

/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  If
   BASE_POINTERS_RESTRICT, declare the field with restrict.  */
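/* MASK, as read from the code below (not a formal spec): bit 0 installs
   the field into the receiver record (field_map/record_type), bit 1
   into the sender record (sfield_map/srecord_type), bit 2 makes the
   field a pointer-to-pointer for an ARRAY_TYPE var, and bit 3 keys the
   splay tree by &DECL_UID (var) rather than by VAR itself.  */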

static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx,
		   bool base_pointers_restrict = false)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
	      || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    {
      type = build_pointer_type (type);
      if (base_pointers_restrict)
	type = build_qualified_type (type, TYPE_QUAL_RESTRICT);
    }
  else if ((mask & 3) == 1 && omp_is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if (type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (t),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}

static tree
install_var_local (tree var, omp_context *ctx)
{
  tree new_var = omp_copy_decl_1 (var, ctx);
  insert_decl_map (&ctx->cb, var, new_var);
  return new_var;
}

/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}

/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
	return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}

/* Create a new context, with OUTER_CTX being the surrounding context.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
		     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->cb.dont_remap_vla_if_no_change = true;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}

static gimple_seq maybe_catch_exception (gimple_seq);

/* Finalize task copyfn.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}

/* Destroy an omp_context data structure.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  delete ctx->cb.decl_map;

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  if (is_task_ctx (ctx))
    finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));

  XDELETE (ctx);
}

/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
			 TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
	{
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
	  DECL_CHAIN (new_f) = new_fields;
	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}

/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  If BASE_POINTERS_RESTRICT, install var field with
   restrict.  */
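/* For example (a sketch), scanning "#pragma omp parallel shared(a)
   firstprivate(b)" with an addressable A installs a pointer field for A
   in the record plus a local replacement decl, and a field plus local
   for B; the second loop over the clauses then fixes up the types and
   value exprs of the remapped decls.  */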

static void
scan_sharing_clauses (tree clauses, omp_context *ctx,
		      bool base_pointers_restrict = false)
{
  tree c, decl;
  bool scan_array_reductions = false;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    goto do_private;
	  else if (!is_variable_sized (decl))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_SHARED:
	  decl = OMP_CLAUSE_DECL (c);
	  /* Ignore shared directives in teams construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
	      if (is_global_var (odecl))
		break;
	      insert_decl_map (&ctx->cb, decl, odecl);
	      break;
	    }
	  gcc_assert (is_taskreg_ctx (ctx));
	  gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
		      || !is_variable_sized (decl));
	  /* Global variables don't need to be copied,
	     the receiver side will use them directly.  */
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      use_pointer_for_field (decl, ctx);
	      break;
	    }
	  by_ref = use_pointer_for_field (decl, NULL);
	  if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
	      || TREE_ADDRESSABLE (decl)
	      || by_ref
	      || omp_is_reference (decl))
	    {
	      by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 3, ctx);
	      install_var_local (decl, ctx);
	      break;
	    }
	  /* We don't need to copy const scalar vars back.  */
	  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
	  goto do_private;

	case OMP_CLAUSE_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && TREE_CODE (decl) == MEM_REF)
	    {
	      tree t = TREE_OPERAND (decl, 0);
	      if (TREE_CODE (t) == POINTER_PLUS_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (TREE_CODE (t) == INDIRECT_REF
		  || TREE_CODE (t) == ADDR_EXPR)
		t = TREE_OPERAND (t, 0);
	      install_var_local (t, ctx);
	      if (is_taskreg_ctx (ctx)
		  && !is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
		  && !is_variable_sized (t))
		{
		  by_ref = use_pointer_for_field (t, ctx);
		  install_var_field (t, by_ref, 3, ctx);
		}
	      break;
	    }
	  goto do_private;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	do_private:
	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
		install_var_field (decl, !omp_is_reference (decl), 3, ctx);
	      else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		install_var_field (decl, true, 3, ctx);
	      else
		install_var_field (decl, false, 3, ctx);
	    }
	  if (is_variable_sized (decl))
	    {
	      if (is_task_ctx (ctx))
		install_var_field (decl, false, 1, ctx);
	      break;
	    }
	  else if (is_taskreg_ctx (ctx))
	    {
	      bool global
		= is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
	      by_ref = use_pointer_for_field (decl, NULL);

	      if (is_task_ctx (ctx)
		  && (global || by_ref || omp_is_reference (decl)))
		{
		  install_var_field (decl, false, 1, ctx);
		  if (!global)
		    install_var_field (decl, by_ref, 2, ctx);
		}
	      else if (!global)
		install_var_field (decl, by_ref, 3, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_USE_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_field (decl, true, 3, ctx);
	  else
	    install_var_field (decl, false, 3, ctx);
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      install_var_local (decl2, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  goto do_private;

	case OMP_CLAUSE__LOOPTEMP_:
	  gcc_assert (is_taskreg_ctx (ctx));
	  decl = OMP_CLAUSE_DECL (c);
	  install_var_field (decl, false, 3, ctx);
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	  decl = OMP_CLAUSE_DECL (c);
	  by_ref = use_pointer_for_field (decl, NULL);
	  install_var_field (decl, by_ref, 3, ctx);
	  break;

	case OMP_CLAUSE_DEFAULT:
	  ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
	  break;

	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE__CILK_FOR_COUNT_:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_MAP:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
	  decl = OMP_CLAUSE_DECL (c);
	  /* Global variables with the "omp declare target" attribute
	     don't need to be copied, the receiver side will use them
	     directly.  However, global variables with the "omp declare
	     target link" attribute, and maps using the ALWAYS modifier,
	     do need to be copied.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable
	      && !lookup_attribute ("omp declare target link",
				    DECL_ATTRIBUTES (decl)))
	    break;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
	    {
	      /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
		 not offloaded; there is nothing to map for those.  */
	      if (!is_gimple_omp_offloaded (ctx->stmt)
		  && !POINTER_TYPE_P (TREE_TYPE (decl))
		  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
		break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		  || (OMP_CLAUSE_MAP_KIND (c)
		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	    {
	      if (TREE_CODE (decl) == COMPONENT_REF
		  || (TREE_CODE (decl) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE)))
		break;
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (DECL_P (decl))
	    {
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_field (decl2, true, 3, ctx);
		  install_var_local (decl2, ctx);
		  install_var_local (decl, ctx);
		}
	      else
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		    install_var_field (decl, true, 7, ctx);
		  else
		    install_var_field (decl, true, 3, ctx,
				       base_pointers_restrict);
		  if (is_gimple_omp_offloaded (ctx->stmt)
		      && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
		    install_var_local (decl, ctx);
		}
	    }
	  else
	    {
	      tree base = get_base_address (decl);
	      tree nc = OMP_CLAUSE_CHAIN (c);
	      if (DECL_P (base)
		  && nc != NULL_TREE
		  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
		  && OMP_CLAUSE_DECL (nc) == base
		  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
		  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
		{
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
		}
	      else
		{
		  if (ctx->outer)
		    {
		      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
		      decl = OMP_CLAUSE_DECL (c);
		    }
		  gcc_assert (!splay_tree_lookup (ctx->field_map,
						  (splay_tree_key) decl));
		  tree field
		    = build_decl (OMP_CLAUSE_LOCATION (c),
				  FIELD_DECL, NULL_TREE, ptr_type_node);
		  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
		  insert_field_into_struct (ctx->record_type, field);
		  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
				     (splay_tree_value) field);
		}
	    }
	  break;

	case OMP_CLAUSE__GRIDDIM_:
	  if (ctx->outer)
	    {
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
	    }
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (decl)
	      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_LINEAR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_variable_sized (decl))
	    {
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
		  && is_gimple_omp_offloaded (ctx->stmt))
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		  fixup_remapped_decl (decl2, ctx, false);
		}
	      install_var_local (decl, ctx);
	    }
	  fixup_remapped_decl (decl, ctx,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			       && OMP_CLAUSE_PRIVATE_DEBUG (c));
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) != MEM_REF)
	    {
	      if (is_variable_sized (decl))
		install_var_local (decl, ctx);
	      fixup_remapped_decl (decl, ctx, false);
	    }
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_SHARED:
	  /* Ignore shared directives in teams construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
								 ctx->outer)))
		break;
	      bool by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 11, ctx);
	      break;
	    }
	  fixup_remapped_decl (decl, ctx, false);
	  break;

	case OMP_CLAUSE_MAP:
	  if (!is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable)
	    break;
	  if (DECL_P (decl))
	    {
	      if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
		  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
		{
		  tree new_decl = lookup_decl (decl, ctx);
		  TREE_TYPE (new_decl)
		    = remap_type (TREE_TYPE (decl), &ctx->cb);
		}
	      else if (DECL_SIZE (decl)
		       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  fixup_remapped_decl (decl2, ctx, false);
		  fixup_remapped_decl (decl, ctx, true);
		}
	      else
		fixup_remapped_decl (decl, ctx, false);
	    }
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_ALIGNED:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE__CILK_FOR_COUNT_:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__GRIDDIM_:
	case OMP_CLAUSE__SIMT_:
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

  gcc_checking_assert (!scan_array_reductions
		       || !is_gimple_omp_oacc (ctx->stmt));
  if (scan_array_reductions)
    {
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	    && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	  {
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
    }
}

/* Create a new name for the omp child function.  Returns an identifier.  If
   IS_CILK_FOR is true then the suffix for the child function is
   "_cilk_for_fn".  */

static tree
create_omp_child_function_name (bool task_copy, bool is_cilk_for)
{
  if (is_cilk_for)
    return clone_function_name (current_function_decl, "_cilk_for_fn");
  return clone_function_name (current_function_decl,
			      task_copy ? "_omp_cpyfn" : "_omp_fn");
}

/* Return the type of the induction variable for the child function for
   _Cilk_for, and the type of the __high and __low variables, based on TYPE.  */

static tree
cilk_for_check_loop_diff_type (tree type)
{
  if (TYPE_PRECISION (type) <= TYPE_PRECISION (uint32_type_node))
    {
      if (TYPE_UNSIGNED (type))
	return uint32_type_node;
      else
	return integer_type_node;
    }
  else
    {
      if (TYPE_UNSIGNED (type))
	return uint64_type_node;
      else
	return long_long_integer_type_node;
    }
}

/* Return true if CTX may belong to offloaded code: either if current function
   is offloaded, or any enclosing context corresponds to a target region.  */

static bool
omp_maybe_offloaded_ctx (omp_context *ctx)
{
  if (cgraph_node::get (current_function_decl)->offloadable)
    return true;
  for (; ctx; ctx = ctx->outer)
    if (is_gimple_omp_offloaded (ctx->stmt))
      return true;
  return false;
}

/* Build a decl for the omp child function.  It'll not contain a body
   yet, just the bare decl.  */
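/* In the common (non-task-copy, non-Cilk) case the decl built below is
   roughly

       static void foo._omp_fn.N (void *.omp_data_i);

   marked TREE_STATIC and uninlinable, and tagged with the "omp target
   entrypoint" attribute when it is the outlined body of an offloaded
   region.  */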

static void
create_omp_child_function (omp_context *ctx, bool task_copy)
{
  tree decl, type, name, t;

  tree cilk_for_count
    = (flag_cilkplus && gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
      ? omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
			 OMP_CLAUSE__CILK_FOR_COUNT_) : NULL_TREE;
  tree cilk_var_type = NULL_TREE;

  name = create_omp_child_function_name (task_copy,
					 cilk_for_count != NULL_TREE);
  if (task_copy)
    type = build_function_type_list (void_type_node, ptr_type_node,
				     ptr_type_node, NULL_TREE);
  else if (cilk_for_count)
    {
      type = TREE_TYPE (OMP_CLAUSE_OPERAND (cilk_for_count, 0));
      cilk_var_type = cilk_for_check_loop_diff_type (type);
      type = build_function_type_list (void_type_node, ptr_type_node,
				       cilk_var_type, cilk_var_type, NULL_TREE);
    }
  else
    type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);

  decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);

  gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
		       || !task_copy);
  if (!task_copy)
    ctx->cb.dst_fn = decl;
  else
    gimple_omp_task_set_copy_fn (ctx->stmt, decl);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  if (omp_maybe_offloaded_ctx (ctx))
    {
      cgraph_node::get_create (decl)->offloadable = 1;
      if (ENABLE_OFFLOADING)
	g->have_offload = true;
    }

  if (cgraph_node::get_create (decl)->offloadable
      && !lookup_attribute ("omp declare target",
                           DECL_ATTRIBUTES (current_function_decl)))
    {
      const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
				 ? "omp target entrypoint"
				 : "omp declare target");
      DECL_ATTRIBUTES (decl)
	= tree_cons (get_identifier (target_attr),
		     NULL_TREE, DECL_ATTRIBUTES (decl));
    }

  t = build_decl (DECL_SOURCE_LOCATION (decl),
		  RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;

  /* _Cilk_for's child function requires two extra parameters called
     __low and __high that are set by the Cilk runtime when it calls this
     function.  */
1667   if (cilk_for_count)
1668     {
1669       t = build_decl (DECL_SOURCE_LOCATION (decl),
1670 		      PARM_DECL, get_identifier ("__high"), cilk_var_type);
1671       DECL_ARTIFICIAL (t) = 1;
1672       DECL_NAMELESS (t) = 1;
1673       DECL_ARG_TYPE (t) = ptr_type_node;
1674       DECL_CONTEXT (t) = current_function_decl;
1675       TREE_USED (t) = 1;
1676       DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
1677       DECL_ARGUMENTS (decl) = t;
1678 
1679       t = build_decl (DECL_SOURCE_LOCATION (decl),
1680 		      PARM_DECL, get_identifier ("__low"), cilk_var_type);
1681       DECL_ARTIFICIAL (t) = 1;
1682       DECL_NAMELESS (t) = 1;
1683       DECL_ARG_TYPE (t) = ptr_type_node;
1684       DECL_CONTEXT (t) = current_function_decl;
1685       TREE_USED (t) = 1;
1686       DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
1687       DECL_ARGUMENTS (decl) = t;
1688     }
1689 
1690   tree data_name = get_identifier (".omp_data_i");
1691   t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
1692 		  ptr_type_node);
1693   DECL_ARTIFICIAL (t) = 1;
1694   DECL_NAMELESS (t) = 1;
1695   DECL_ARG_TYPE (t) = ptr_type_node;
1696   DECL_CONTEXT (t) = current_function_decl;
1697   TREE_USED (t) = 1;
1698   TREE_READONLY (t) = 1;
1699   if (cilk_for_count)
1700     DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
1701   DECL_ARGUMENTS (decl) = t;
1702   if (!task_copy)
1703     ctx->receiver_decl = t;
1704   else
1705     {
1706       t = build_decl (DECL_SOURCE_LOCATION (decl),
1707 		      PARM_DECL, get_identifier (".omp_data_o"),
1708 		      ptr_type_node);
1709       DECL_ARTIFICIAL (t) = 1;
1710       DECL_NAMELESS (t) = 1;
1711       DECL_ARG_TYPE (t) = ptr_type_node;
1712       DECL_CONTEXT (t) = current_function_decl;
1713       TREE_USED (t) = 1;
1714       TREE_ADDRESSABLE (t) = 1;
1715       DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
1716       DECL_ARGUMENTS (decl) = t;
1717     }
1718 
1719   /* Allocate memory for the function structure.  The call to
1720      allocate_struct_function clobbers CFUN, so we need to restore
1721      it afterward.  */
1722   push_struct_function (decl);
1723   cfun->function_end_locus = gimple_location (ctx->stmt);
1724   init_tree_ssa (cfun);
1725   pop_cfun ();
1726 }
1727 
1728 /* Callback for walk_gimple_seq.  Check whether the combined parallel
1729    contains a GIMPLE_OMP_FOR satisfying gimple_omp_for_combined_into_p.  */
1730 
1731 tree
1732 omp_find_combined_for (gimple_stmt_iterator *gsi_p,
1733 		       bool *handled_ops_p,
1734 		       struct walk_stmt_info *wi)
1735 {
1736   gimple *stmt = gsi_stmt (*gsi_p);
1737 
1738   *handled_ops_p = true;
1739   switch (gimple_code (stmt))
1740     {
1741     WALK_SUBSTMTS;
1742 
1743     case GIMPLE_OMP_FOR:
1744       if (gimple_omp_for_combined_into_p (stmt)
1745 	  && gimple_omp_for_kind (stmt)
1746 	     == *(const enum gf_mask *) (wi->info))
1747 	{
1748 	  wi->info = stmt;
1749 	  return integer_zero_node;
1750 	}
1751       break;
1752     default:
1753       break;
1754     }
1755   return NULL;
1756 }
1757 
1758 /* Add _LOOPTEMP_ clauses on OpenMP parallel or task.  */
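/* For instance (an illustrative sketch, not emitted verbatim), given

       #pragma omp parallel for collapse(2) lastprivate(k)
       for (i = 0; i < n; i++)
	 for (j = 0; j < m; j++)
	   k = ...;

   with a non-constant iteration count, four _LOOPTEMP_ clauses are added
   below: istart, iend, one count temporary for the inner loop, and the
   total iteration count needed for the lastprivate handling.  */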
1759 
1760 static void
1761 add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
1762 			      omp_context *outer_ctx)
1763 {
1764   struct walk_stmt_info wi;
1765 
1766   memset (&wi, 0, sizeof (wi));
1767   wi.val_only = true;
1768   wi.info = (void *) &msk;
1769   walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
1770   if (wi.info != (void *) &msk)
1771     {
1772       gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
1773       struct omp_for_data fd;
1774       omp_extract_for_data (for_stmt, &fd, NULL);
1775       /* We need two temporaries with fd.iter_type (istart/iend)
1776 	 and then (fd.collapse - 1) temporaries with the same
1777 	 type for count2 ... countN-1 vars if not constant.  */
1778       size_t count = 2, i;
1779       tree type = fd.iter_type;
1780       if (fd.collapse > 1
1781 	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
1782 	{
1783 	  count += fd.collapse - 1;
1784 	  /* If there are lastprivate clauses on the inner
1785 	     GIMPLE_OMP_FOR, add one more temporary for the total number
1786 	     of iterations (product of count1 ... countN-1).  */
1787 	  if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
1788 			       OMP_CLAUSE_LASTPRIVATE))
1789 	    count++;
1790 	  else if (msk == GF_OMP_FOR_KIND_FOR
1791 		   && omp_find_clause (gimple_omp_parallel_clauses (stmt),
1792 				       OMP_CLAUSE_LASTPRIVATE))
1793 	    count++;
1794 	}
1795       for (i = 0; i < count; i++)
1796 	{
1797 	  tree temp = create_tmp_var (type);
1798 	  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
1799 	  insert_decl_map (&outer_ctx->cb, temp, temp);
1800 	  OMP_CLAUSE_DECL (c) = temp;
1801 	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1802 	  gimple_omp_taskreg_set_clauses (stmt, c);
1803 	}
1804     }
1805 }
1806 
1807 /* Scan an OpenMP parallel directive.  */
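/* E.g. (illustrative) for

       #pragma omp parallel shared(a)

   the scan below records in ctx->record_type the .omp_data_s structure
   through which A is passed (by value or by reference, as decided by
   use_pointer_for_field) between the spawning thread and the child
   function.  */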
1808 
1809 static void
1810 scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
1811 {
1812   omp_context *ctx;
1813   tree name;
1814   gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));
1815 
1816   /* Ignore parallel directives with empty bodies, unless there
1817      are copyin clauses.  */
1818   if (optimize > 0
1819       && empty_body_p (gimple_omp_body (stmt))
1820       && omp_find_clause (gimple_omp_parallel_clauses (stmt),
1821 			  OMP_CLAUSE_COPYIN) == NULL)
1822     {
1823       gsi_replace (gsi, gimple_build_nop (), false);
1824       return;
1825     }
1826 
1827   if (gimple_omp_parallel_combined_p (stmt))
1828     add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
1829 
1830   ctx = new_omp_context (stmt, outer_ctx);
1831   taskreg_contexts.safe_push (ctx);
1832   if (taskreg_nesting_level > 1)
1833     ctx->is_nested = true;
1834   ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1835   ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
1836   ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1837   name = create_tmp_var_name (".omp_data_s");
1838   name = build_decl (gimple_location (stmt),
1839 		     TYPE_DECL, name, ctx->record_type);
1840   DECL_ARTIFICIAL (name) = 1;
1841   DECL_NAMELESS (name) = 1;
1842   TYPE_NAME (ctx->record_type) = name;
1843   TYPE_ARTIFICIAL (ctx->record_type) = 1;
1844   if (!gimple_omp_parallel_grid_phony (stmt))
1845     {
1846       create_omp_child_function (ctx, false);
1847       gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
1848     }
1849 
1850   scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
1851   scan_omp (gimple_omp_body_ptr (stmt), ctx);
1852 
1853   if (TYPE_FIELDS (ctx->record_type) == NULL)
1854     ctx->record_type = ctx->receiver_decl = NULL;
1855 }
1856 
1857 /* Scan an OpenMP task directive.  */
1858 
1859 static void
1860 scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
1861 {
1862   omp_context *ctx;
1863   tree name, t;
1864   gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
1865 
1866   /* Ignore task directives with empty bodies, unless they have a depend
1867      clause.  */
1868   if (optimize > 0
1869       && empty_body_p (gimple_omp_body (stmt))
1870       && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
1871     {
1872       gsi_replace (gsi, gimple_build_nop (), false);
1873       return;
1874     }
1875 
1876   if (gimple_omp_task_taskloop_p (stmt))
1877     add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
1878 
1879   ctx = new_omp_context (stmt, outer_ctx);
1880   taskreg_contexts.safe_push (ctx);
1881   if (taskreg_nesting_level > 1)
1882     ctx->is_nested = true;
1883   ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1884   ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
1885   ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1886   name = create_tmp_var_name (".omp_data_s");
1887   name = build_decl (gimple_location (stmt),
1888 		     TYPE_DECL, name, ctx->record_type);
1889   DECL_ARTIFICIAL (name) = 1;
1890   DECL_NAMELESS (name) = 1;
1891   TYPE_NAME (ctx->record_type) = name;
1892   TYPE_ARTIFICIAL (ctx->record_type) = 1;
1893   create_omp_child_function (ctx, false);
1894   gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
1895 
1896   scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
1897 
1898   if (ctx->srecord_type)
1899     {
1900       name = create_tmp_var_name (".omp_data_a");
1901       name = build_decl (gimple_location (stmt),
1902 			 TYPE_DECL, name, ctx->srecord_type);
1903       DECL_ARTIFICIAL (name) = 1;
1904       DECL_NAMELESS (name) = 1;
1905       TYPE_NAME (ctx->srecord_type) = name;
1906       TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
1907       create_omp_child_function (ctx, true);
1908     }
1909 
1910   scan_omp (gimple_omp_body_ptr (stmt), ctx);
1911 
1912   if (TYPE_FIELDS (ctx->record_type) == NULL)
1913     {
1914       ctx->record_type = ctx->receiver_decl = NULL;
1915       t = build_int_cst (long_integer_type_node, 0);
1916       gimple_omp_task_set_arg_size (stmt, t);
1917       t = build_int_cst (long_integer_type_node, 1);
1918       gimple_omp_task_set_arg_align (stmt, t);
1919     }
1920 }
1921 
1922 /* Helper function for finish_taskreg_scan, called through walk_tree.
1923    If maybe_lookup_decl_in_outer_ctx returns a replacement for some
1924    tree, substitute it in the expression.  */
1925 
1926 static tree
1927 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
1928 {
1929   if (VAR_P (*tp))
1930     {
1931       omp_context *ctx = (omp_context *) data;
1932       tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
1933       if (t != *tp)
1934 	{
1935 	  if (DECL_HAS_VALUE_EXPR_P (t))
1936 	    t = unshare_expr (DECL_VALUE_EXPR (t));
1937 	  *tp = t;
1938 	}
1939       *walk_subtrees = 0;
1940     }
1941   else if (IS_TYPE_OR_DECL_P (*tp))
1942     *walk_subtrees = 0;
1943   return NULL_TREE;
1944 }
1945 
1946 /* If any decls have been made addressable during scan_omp,
1947    adjust their fields if needed, and layout record types
1948    of parallel/task constructs.  */
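/* A sketch of the taskloop case handled below (field names invented for
   illustration): for

       #pragma omp taskloop
       for (i = a; i < b; i++) ...

   GOMP_taskloop stores the per-task iteration bounds into the first two
   fields of the argument record, so the record is reordered to

       struct .omp_data_s { long _looptemp_1; long _looptemp_2; ... };  */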
1949 
1950 static void
1951 finish_taskreg_scan (omp_context *ctx)
1952 {
1953   if (ctx->record_type == NULL_TREE)
1954     return;
1955 
1956   /* If any task_shared_vars were needed, verify for all
1957      OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK}
1958      statements whether use_pointer_for_field has changed
1959      because of that.  If it did, update the field types now.  */
1960   if (task_shared_vars)
1961     {
1962       tree c;
1963 
1964       for (c = gimple_omp_taskreg_clauses (ctx->stmt);
1965 	   c; c = OMP_CLAUSE_CHAIN (c))
1966 	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1967 	    && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1968 	  {
1969 	    tree decl = OMP_CLAUSE_DECL (c);
1970 
1971 	    /* Global variables don't need to be copied,
1972 	       the receiver side will use them directly.  */
1973 	    if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1974 	      continue;
1975 	    if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
1976 		|| !use_pointer_for_field (decl, ctx))
1977 	      continue;
1978 	    tree field = lookup_field (decl, ctx);
1979 	    if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
1980 		&& TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
1981 	      continue;
1982 	    TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
1983 	    TREE_THIS_VOLATILE (field) = 0;
1984 	    DECL_USER_ALIGN (field) = 0;
1985 	    SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
1986 	    if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
1987 	      SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
1988 	    if (ctx->srecord_type)
1989 	      {
1990 		tree sfield = lookup_sfield (decl, ctx);
1991 		TREE_TYPE (sfield) = TREE_TYPE (field);
1992 		TREE_THIS_VOLATILE (sfield) = 0;
1993 		DECL_USER_ALIGN (sfield) = 0;
1994 		SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
1995 		if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
1996 		  SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
1997 	      }
1998 	  }
1999     }
2000 
2001   if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
2002     {
2003       layout_type (ctx->record_type);
2004       fixup_child_record_type (ctx);
2005     }
2006   else
2007     {
2008       location_t loc = gimple_location (ctx->stmt);
2009       tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
2010       /* Move VLA fields to the end.  */
2011       p = &TYPE_FIELDS (ctx->record_type);
2012       while (*p)
2013 	if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
2014 	    || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
2015 	  {
2016 	    *q = *p;
2017 	    *p = TREE_CHAIN (*p);
2018 	    TREE_CHAIN (*q) = NULL_TREE;
2019 	    q = &TREE_CHAIN (*q);
2020 	  }
2021 	else
2022 	  p = &DECL_CHAIN (*p);
2023       *p = vla_fields;
2024       if (gimple_omp_task_taskloop_p (ctx->stmt))
2025 	{
2026 	  /* Move the fields corresponding to the first and second _looptemp_
2027 	     clauses to the front.  These are filled in by GOMP_taskloop
2028 	     and thus need to be in specific positions.  */
2029 	  tree c1 = gimple_omp_task_clauses (ctx->stmt);
2030 	  c1 = omp_find_clause (c1, OMP_CLAUSE__LOOPTEMP_);
2031 	  tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
2032 				     OMP_CLAUSE__LOOPTEMP_);
2033 	  tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
2034 	  tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
2035 	  p = &TYPE_FIELDS (ctx->record_type);
2036 	  while (*p)
2037 	    if (*p == f1 || *p == f2)
2038 	      *p = DECL_CHAIN (*p);
2039 	    else
2040 	      p = &DECL_CHAIN (*p);
2041 	  DECL_CHAIN (f1) = f2;
2042 	  DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
2043 	  TYPE_FIELDS (ctx->record_type) = f1;
2044 	  if (ctx->srecord_type)
2045 	    {
2046 	      f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2047 	      f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
2048 	      p = &TYPE_FIELDS (ctx->srecord_type);
2049 	      while (*p)
2050 		if (*p == f1 || *p == f2)
2051 		  *p = DECL_CHAIN (*p);
2052 		else
2053 		  p = &DECL_CHAIN (*p);
2054 	      DECL_CHAIN (f1) = f2;
2055 	      DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2056 	      TYPE_FIELDS (ctx->srecord_type) = f1;
2057 	    }
2058 	}
2059       layout_type (ctx->record_type);
2060       fixup_child_record_type (ctx);
2061       if (ctx->srecord_type)
2062 	layout_type (ctx->srecord_type);
2063       tree t = fold_convert_loc (loc, long_integer_type_node,
2064 				 TYPE_SIZE_UNIT (ctx->record_type));
2065       if (TREE_CODE (t) != INTEGER_CST)
2066 	{
2067 	  t = unshare_expr (t);
2068 	  walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2069 	}
2070       gimple_omp_task_set_arg_size (ctx->stmt, t);
2071       t = build_int_cst (long_integer_type_node,
2072 			 TYPE_ALIGN_UNIT (ctx->record_type));
2073       gimple_omp_task_set_arg_align (ctx->stmt, t);
2074     }
2075 }
2076 
2077 /* Find the enclosing offload context.  */
2078 
2079 static omp_context *
2080 enclosing_target_ctx (omp_context *ctx)
2081 {
2082   for (; ctx; ctx = ctx->outer)
2083     if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2084       break;
2085 
2086   return ctx;
2087 }
2088 
2089 /* Return true if ctx is part of an oacc kernels region.  */
2090 
2091 static bool
2092 ctx_in_oacc_kernels_region (omp_context *ctx)
2093 {
2094   for (; ctx != NULL; ctx = ctx->outer)
2095     {
2096       gimple *stmt = ctx->stmt;
2097       if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2098 	  && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2099 	return true;
2100     }
2101 
2102   return false;
2103 }
2104 
2105 /* Check the parallelism clauses inside a kernels region.
2106    Until kernels handling moves to use the same loop indirection
2107    scheme as parallel, we need to do this checking early.  */
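/* E.g. (illustrative) both of the following are diagnosed here:

       #pragma acc loop seq gang	<- "seq" overrides "gang"

       #pragma acc loop gang
	 #pragma acc loop gang		<- same parallelism as containing loop  */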
2108 
2109 static unsigned
2110 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2111 {
2112   bool checking = true;
2113   unsigned outer_mask = 0;
2114   unsigned this_mask = 0;
2115   bool has_seq = false, has_auto = false;
2116 
2117   if (ctx->outer)
2118     outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2119   if (!stmt)
2120     {
2121       checking = false;
2122       if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2123 	return outer_mask;
2124       stmt = as_a <gomp_for *> (ctx->stmt);
2125     }
2126 
2127   for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2128     {
2129       switch (OMP_CLAUSE_CODE (c))
2130 	{
2131 	case OMP_CLAUSE_GANG:
2132 	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2133 	  break;
2134 	case OMP_CLAUSE_WORKER:
2135 	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2136 	  break;
2137 	case OMP_CLAUSE_VECTOR:
2138 	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2139 	  break;
2140 	case OMP_CLAUSE_SEQ:
2141 	  has_seq = true;
2142 	  break;
2143 	case OMP_CLAUSE_AUTO:
2144 	  has_auto = true;
2145 	  break;
2146 	default:
2147 	  break;
2148 	}
2149     }
2150 
2151   if (checking)
2152     {
2153       if (has_seq && (this_mask || has_auto))
2154 	error_at (gimple_location (stmt), "%<seq%> overrides other"
2155 		  " OpenACC loop specifiers");
2156       else if (has_auto && this_mask)
2157 	error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2158 		  " OpenACC loop specifiers");
2159 
2160       if (this_mask & outer_mask)
2161 	error_at (gimple_location (stmt), "inner loop uses same"
2162 		  " OpenACC parallelism as containing loop");
2163     }
2164 
2165   return outer_mask | this_mask;
2166 }
2167 
2168 /* Scan a GIMPLE_OMP_FOR.  */
2169 
2170 static omp_context *
2171 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2172 {
2173   omp_context *ctx;
2174   size_t i;
2175   tree clauses = gimple_omp_for_clauses (stmt);
2176 
2177   ctx = new_omp_context (stmt, outer_ctx);
2178 
2179   if (is_gimple_omp_oacc (stmt))
2180     {
2181       omp_context *tgt = enclosing_target_ctx (outer_ctx);
2182 
2183       if (!tgt || is_oacc_parallel (tgt))
2184 	for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2185 	  {
2186 	    char const *check = NULL;
2187 
2188 	    switch (OMP_CLAUSE_CODE (c))
2189 	      {
2190 	      case OMP_CLAUSE_GANG:
2191 		check = "gang";
2192 		break;
2193 
2194 	      case OMP_CLAUSE_WORKER:
2195 		check = "worker";
2196 		break;
2197 
2198 	      case OMP_CLAUSE_VECTOR:
2199 		check = "vector";
2200 		break;
2201 
2202 	      default:
2203 		break;
2204 	      }
2205 
2206 	    if (check && OMP_CLAUSE_OPERAND (c, 0))
2207 	      error_at (gimple_location (stmt),
2208 			"argument not permitted on %qs clause in"
2209 			" OpenACC %<parallel%>", check);
2210 	  }
2211 
2212       if (tgt && is_oacc_kernels (tgt))
2213 	{
2214 	  /* Strip out reductions, as they are not handled yet.  */
2215 	  tree *prev_ptr = &clauses;
2216 
2217 	  while (tree probe = *prev_ptr)
2218 	    {
2219 	      tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2220 
2221 	      if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2222 		*prev_ptr = *next_ptr;
2223 	      else
2224 		prev_ptr = next_ptr;
2225 	    }
2226 
2227 	  gimple_omp_for_set_clauses (stmt, clauses);
2228 	  check_oacc_kernel_gwv (stmt, ctx);
2229 	}
2230     }
2231 
2232   scan_sharing_clauses (clauses, ctx);
2233 
2234   scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2235   for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2236     {
2237       scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2238       scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2239       scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2240       scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2241     }
2242   scan_omp (gimple_omp_body_ptr (stmt), ctx);
2243   return ctx;
2244 }
2245 
2246 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD.  */
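/* The transformation below produces, in pseudo-GIMPLE (a sketch of what
   the code constructs, not literal output):

       cond = IFN_GOMP_USE_SIMT ();
       if (cond != 0) goto lab1; else goto lab2;
       lab1: <copy of the loop, carrying a _simt_ clause>; goto lab3;
       lab2: <original simd loop>;
       lab3: ;  */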
2247 
2248 static void
2249 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2250 	       omp_context *outer_ctx)
2251 {
2252   gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2253   gsi_replace (gsi, bind, false);
2254   gimple_seq seq = NULL;
2255   gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2256   tree cond = create_tmp_var_raw (integer_type_node);
2257   DECL_CONTEXT (cond) = current_function_decl;
2258   DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2259   gimple_bind_set_vars (bind, cond);
2260   gimple_call_set_lhs (g, cond);
2261   gimple_seq_add_stmt (&seq, g);
2262   tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2263   tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2264   tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2265   g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2266   gimple_seq_add_stmt (&seq, g);
2267   g = gimple_build_label (lab1);
2268   gimple_seq_add_stmt (&seq, g);
2269   gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2270   gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2271   tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2272   OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2273   gimple_omp_for_set_clauses (new_stmt, clause);
2274   gimple_seq_add_stmt (&seq, new_stmt);
2275   g = gimple_build_goto (lab3);
2276   gimple_seq_add_stmt (&seq, g);
2277   g = gimple_build_label (lab2);
2278   gimple_seq_add_stmt (&seq, g);
2279   gimple_seq_add_stmt (&seq, stmt);
2280   g = gimple_build_label (lab3);
2281   gimple_seq_add_stmt (&seq, g);
2282   gimple_bind_set_body (bind, seq);
2283   update_stmt (bind);
2284   scan_omp_for (new_stmt, outer_ctx);
2285   scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
2286 }
2287 
2288 /* Scan an OpenMP sections directive.  */
2289 
2290 static void
2291 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2292 {
2293   omp_context *ctx;
2294 
2295   ctx = new_omp_context (stmt, outer_ctx);
2296   scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2297   scan_omp (gimple_omp_body_ptr (stmt), ctx);
2298 }
2299 
2300 /* Scan an OpenMP single directive.  */
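/* E.g. (illustrative)

       #pragma omp single copyprivate(x)

   needs the .omp_copy_s record built below to broadcast X from the
   thread that executed the single body to the other threads.  */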
2301 
2302 static void
2303 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2304 {
2305   omp_context *ctx;
2306   tree name;
2307 
2308   ctx = new_omp_context (stmt, outer_ctx);
2309   ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2310   ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2311   name = create_tmp_var_name (".omp_copy_s");
2312   name = build_decl (gimple_location (stmt),
2313 		     TYPE_DECL, name, ctx->record_type);
2314   TYPE_NAME (ctx->record_type) = name;
2315 
2316   scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2317   scan_omp (gimple_omp_body_ptr (stmt), ctx);
2318 
2319   if (TYPE_FIELDS (ctx->record_type) == NULL)
2320     ctx->record_type = NULL;
2321   else
2322     layout_type (ctx->record_type);
2323 }
2324 
2325 /* Return true if the CLAUSES of an omp target guarantee that the base pointers
2326    used in the corresponding offloaded function are restrict.  */
2327 
2328 static bool
2329 omp_target_base_pointers_restrict_p (tree clauses)
2330 {
2331   /* The analysis relies on the GOMP_MAP_FORCE_* mapping kinds, which are only
2332      used by OpenACC.  */
2333   if (flag_openacc == 0)
2334     return false;
2335 
2336   /* I.  Basic example:
2337 
2338        void foo (void)
2339        {
2340 	 unsigned int a[2], b[2];
2341 
2342 	 #pragma acc kernels \
2343 	   copyout (a) \
2344 	   copyout (b)
2345 	 {
2346 	   a[0] = 0;
2347 	   b[0] = 1;
2348 	 }
2349        }
2350 
2351      After gimplification, we have:
2352 
2353        #pragma omp target oacc_kernels \
2354 	 map(force_from:a [len: 8]) \
2355 	 map(force_from:b [len: 8])
2356        {
2357 	 a[0] = 0;
2358 	 b[0] = 1;
2359        }
2360 
2361      Because both mappings have the force prefix, we know that they will be
2362      allocated when calling the corresponding offloaded function, which means we
2363      can mark the base pointers for a and b in the offloaded function as
2364      restrict.  */
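  /* Conversely, any clause that is not a mapping, or any mapping kind
     without the force prefix (e.g. a present_or_* variant), makes the
     loop below give up and return false.  */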
2365 
2366   tree c;
2367   for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2368     {
2369       if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
2370 	return false;
2371 
2372       switch (OMP_CLAUSE_MAP_KIND (c))
2373 	{
2374 	case GOMP_MAP_FORCE_ALLOC:
2375 	case GOMP_MAP_FORCE_TO:
2376 	case GOMP_MAP_FORCE_FROM:
2377 	case GOMP_MAP_FORCE_TOFROM:
2378 	  break;
2379 	default:
2380 	  return false;
2381 	}
2382     }
2383 
2384   return true;
2385 }
2386 
2387 /* Scan a GIMPLE_OMP_TARGET.  */
2388 
2389 static void
2390 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
2391 {
2392   omp_context *ctx;
2393   tree name;
2394   bool offloaded = is_gimple_omp_offloaded (stmt);
2395   tree clauses = gimple_omp_target_clauses (stmt);
2396 
2397   ctx = new_omp_context (stmt, outer_ctx);
2398   ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2399   ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
2400   ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2401   name = create_tmp_var_name (".omp_data_t");
2402   name = build_decl (gimple_location (stmt),
2403 		     TYPE_DECL, name, ctx->record_type);
2404   DECL_ARTIFICIAL (name) = 1;
2405   DECL_NAMELESS (name) = 1;
2406   TYPE_NAME (ctx->record_type) = name;
2407   TYPE_ARTIFICIAL (ctx->record_type) = 1;
2408 
2409   bool base_pointers_restrict = false;
2410   if (offloaded)
2411     {
2412       create_omp_child_function (ctx, false);
2413       gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2414 
2415       base_pointers_restrict = omp_target_base_pointers_restrict_p (clauses);
2416       if (base_pointers_restrict
2417 	  && dump_file && (dump_flags & TDF_DETAILS))
2418 	fprintf (dump_file,
2419 		 "Base pointers in offloaded function are restrict\n");
2420     }
2421 
2422   scan_sharing_clauses (clauses, ctx, base_pointers_restrict);
2423   scan_omp (gimple_omp_body_ptr (stmt), ctx);
2424 
2425   if (TYPE_FIELDS (ctx->record_type) == NULL)
2426     ctx->record_type = ctx->receiver_decl = NULL;
2427   else
2428     {
2429       TYPE_FIELDS (ctx->record_type)
2430 	= nreverse (TYPE_FIELDS (ctx->record_type));
2431       if (flag_checking)
2432 	{
2433 	  unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2434 	  for (tree field = TYPE_FIELDS (ctx->record_type);
2435 	       field;
2436 	       field = DECL_CHAIN (field))
2437 	    gcc_assert (DECL_ALIGN (field) == align);
2438 	}
2439       layout_type (ctx->record_type);
2440       if (offloaded)
2441 	fixup_child_record_type (ctx);
2442     }
2443 }
2444 
2445 /* Scan an OpenMP teams directive.  */
2446 
2447 static void
2448 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
2449 {
2450   omp_context *ctx = new_omp_context (stmt, outer_ctx);
2451   scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2452   scan_omp (gimple_omp_body_ptr (stmt), ctx);
2453 }
2454 
2455 /* Check nesting restrictions.  */
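/* For example (illustrative), the construct below is rejected here
   because a work-sharing region may not be closely nested inside
   another work-sharing region:

       #pragma omp for
       for (i = 0; i < n; i++)
	 {
	   #pragma omp single	<- diagnosed below
	   f (i);
	 }  */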
2456 static bool
2457 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
2458 {
2459   tree c;
2460 
2461   if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
2462     /* GRID_BODY is an artificial construct; nesting rules will be checked
2463        in the original copy of its contents.  */
2464     return true;
2465 
2466   /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2467      inside an OpenACC CTX.  */
2468   if (!(is_gimple_omp (stmt)
2469 	&& is_gimple_omp_oacc (stmt))
2470       /* Except for atomic codes that we share with OpenMP.  */
2471       && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2472 	   || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2473     {
2474       if (oacc_get_fn_attrib (cfun->decl) != NULL)
2475 	{
2476 	  error_at (gimple_location (stmt),
2477 		    "non-OpenACC construct inside of OpenACC routine");
2478 	  return false;
2479 	}
2480       else
2481 	for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2482 	  if (is_gimple_omp (octx->stmt)
2483 	      && is_gimple_omp_oacc (octx->stmt))
2484 	    {
2485 	      error_at (gimple_location (stmt),
2486 			"non-OpenACC construct inside of OpenACC region");
2487 	      return false;
2488 	    }
2489     }
2490 
2491   if (ctx != NULL)
2492     {
2493       if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2494 	  && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
2495 	{
2496 	  c = NULL_TREE;
2497 	  if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2498 	    {
2499 	      c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2500 	      if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2501 		{
2502 		  if (omp_find_clause (c, OMP_CLAUSE_THREADS)
2503 		      && (ctx->outer == NULL
2504 			  || !gimple_omp_for_combined_into_p (ctx->stmt)
2505 			  || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2506 			  || (gimple_omp_for_kind (ctx->outer->stmt)
2507 			      != GF_OMP_FOR_KIND_FOR)
2508 			  || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2509 		    {
2510 		      error_at (gimple_location (stmt),
2511 				"%<ordered simd threads%> must be closely "
2512 				"nested inside of %<for simd%> region");
2513 		      return false;
2514 		    }
2515 		  return true;
2516 		}
2517 	    }
2518 	  error_at (gimple_location (stmt),
2519 		    "OpenMP constructs other than %<#pragma omp ordered simd%>"
2520 		    " may not be nested inside %<simd%> region");
2521 	  return false;
2522 	}
2523       else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2524 	{
2525 	  if ((gimple_code (stmt) != GIMPLE_OMP_FOR
2526 	       || ((gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE)
2527 		   && (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP)))
2528 	      && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2529 	    {
2530 	      error_at (gimple_location (stmt),
2531 			"only %<distribute%> or %<parallel%> regions are "
2532 			"allowed to be strictly nested inside %<teams%> "
2533 			"region");
2534 	      return false;
2535 	    }
2536 	}
2537     }
2538   switch (gimple_code (stmt))
2539     {
2540     case GIMPLE_OMP_FOR:
2541       if (gimple_omp_for_kind (stmt) & GF_OMP_FOR_SIMD)
2542 	return true;
2543       if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2544 	{
2545 	  if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2546 	    {
2547 	      error_at (gimple_location (stmt),
2548 			"%<distribute%> region must be strictly nested "
2549 			"inside %<teams%> construct");
2550 	      return false;
2551 	    }
2552 	  return true;
2553 	}
2554       /* We split taskloop into a task with a nested taskloop inside it.  */
2555       if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2556 	return true;
2557       if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
2558 	{
2559 	  bool ok = false;
2560 
2561 	  if (ctx)
2562 	    switch (gimple_code (ctx->stmt))
2563 	      {
2564 	      case GIMPLE_OMP_FOR:
2565 		ok = (gimple_omp_for_kind (ctx->stmt)
2566 		      == GF_OMP_FOR_KIND_OACC_LOOP);
2567 		break;
2568 
2569 	      case GIMPLE_OMP_TARGET:
2570 		switch (gimple_omp_target_kind (ctx->stmt))
2571 		  {
2572 		  case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2573 		  case GF_OMP_TARGET_KIND_OACC_KERNELS:
2574 		    ok = true;
2575 		    break;
2576 
2577 		  default:
2578 		    break;
2579 		  }
2580 
	      /* FALLTHRU */
2581 	      default:
2582 		break;
2583 	      }
2584 	  else if (oacc_get_fn_attrib (current_function_decl))
2585 	    ok = true;
2586 	  if (!ok)
2587 	    {
2588 	      error_at (gimple_location (stmt),
2589 			"OpenACC loop directive must be associated with"
2590 			" an OpenACC compute region");
2591 	      return false;
2592 	    }
2593 	}
2594       /* FALLTHRU */
2595     case GIMPLE_CALL:
2596       if (is_gimple_call (stmt)
2597 	  && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2598 	      == BUILT_IN_GOMP_CANCEL
2599 	      || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2600 		 == BUILT_IN_GOMP_CANCELLATION_POINT))
2601 	{
2602 	  const char *bad = NULL;
2603 	  const char *kind = NULL;
2604 	  const char *construct
2605 	    = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2606 	       == BUILT_IN_GOMP_CANCEL)
2607 	      ? "#pragma omp cancel"
2608 	      : "#pragma omp cancellation point";
2609 	  if (ctx == NULL)
2610 	    {
2611 	      error_at (gimple_location (stmt), "orphaned %qs construct",
2612 			construct);
2613 	      return false;
2614 	    }
2615 	  switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
2616 		  ? tree_to_shwi (gimple_call_arg (stmt, 0))
2617 		  : 0)
2618 	    {
2619 	    case 1:
2620 	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
2621 		bad = "#pragma omp parallel";
2622 	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2623 		       == BUILT_IN_GOMP_CANCEL
2624 		       && !integer_zerop (gimple_call_arg (stmt, 1)))
2625 		ctx->cancellable = true;
2626 	      kind = "parallel";
2627 	      break;
2628 	    case 2:
2629 	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2630 		  || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
2631 		bad = "#pragma omp for";
2632 	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2633 		       == BUILT_IN_GOMP_CANCEL
2634 		       && !integer_zerop (gimple_call_arg (stmt, 1)))
2635 		{
2636 		  ctx->cancellable = true;
2637 		  if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2638 				       OMP_CLAUSE_NOWAIT))
2639 		    warning_at (gimple_location (stmt), 0,
2640 				"%<#pragma omp cancel for%> inside "
2641 				"%<nowait%> for construct");
2642 		  if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2643 				       OMP_CLAUSE_ORDERED))
2644 		    warning_at (gimple_location (stmt), 0,
2645 				"%<#pragma omp cancel for%> inside "
2646 				"%<ordered%> for construct");
2647 		}
2648 	      kind = "for";
2649 	      break;
2650 	    case 4:
2651 	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
2652 		  && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
2653 		bad = "#pragma omp sections";
2654 	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2655 		       == BUILT_IN_GOMP_CANCEL
2656 		       && !integer_zerop (gimple_call_arg (stmt, 1)))
2657 		{
2658 		  if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
2659 		    {
2660 		      ctx->cancellable = true;
2661 		      if (omp_find_clause (gimple_omp_sections_clauses
2662 								(ctx->stmt),
2663 					   OMP_CLAUSE_NOWAIT))
2664 			warning_at (gimple_location (stmt), 0,
2665 				    "%<#pragma omp cancel sections%> inside "
2666 				    "%<nowait%> sections construct");
2667 		    }
2668 		  else
2669 		    {
2670 		      gcc_assert (ctx->outer
2671 				  && gimple_code (ctx->outer->stmt)
2672 				     == GIMPLE_OMP_SECTIONS);
2673 		      ctx->outer->cancellable = true;
2674 		      if (omp_find_clause (gimple_omp_sections_clauses
2675 							(ctx->outer->stmt),
2676 					   OMP_CLAUSE_NOWAIT))
2677 			warning_at (gimple_location (stmt), 0,
2678 				    "%<#pragma omp cancel sections%> inside "
2679 				    "%<nowait%> sections construct");
2680 		    }
2681 		}
2682 	      kind = "sections";
2683 	      break;
2684 	    case 8:
2685 	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_TASK)
2686 		bad = "#pragma omp task";
2687 	      else
2688 		{
2689 		  for (omp_context *octx = ctx->outer;
2690 		       octx; octx = octx->outer)
2691 		    {
2692 		      switch (gimple_code (octx->stmt))
2693 			{
2694 			case GIMPLE_OMP_TASKGROUP:
2695 			  break;
2696 			case GIMPLE_OMP_TARGET:
2697 			  if (gimple_omp_target_kind (octx->stmt)
2698 			      != GF_OMP_TARGET_KIND_REGION)
2699 			    continue;
2700 			  /* FALLTHRU */
2701 			case GIMPLE_OMP_PARALLEL:
2702 			case GIMPLE_OMP_TEAMS:
2703 			  error_at (gimple_location (stmt),
2704 				    "%<%s taskgroup%> construct not closely "
2705 				    "nested inside of %<taskgroup%> region",
2706 				    construct);
2707 			  return false;
2708 			default:
2709 			  continue;
2710 			}
2711 		      break;
2712 		    }
2713 		  ctx->cancellable = true;
2714 		}
2715 	      kind = "taskgroup";
2716 	      break;
2717 	    default:
2718 	      error_at (gimple_location (stmt), "invalid arguments");
2719 	      return false;
2720 	    }
2721 	  if (bad)
2722 	    {
2723 	      error_at (gimple_location (stmt),
2724 			"%<%s %s%> construct not closely nested inside of %qs",
2725 			construct, kind, bad);
2726 	      return false;
2727 	    }
2728 	}
2729       /* FALLTHRU */
2730     case GIMPLE_OMP_SECTIONS:
2731     case GIMPLE_OMP_SINGLE:
2732       for (; ctx != NULL; ctx = ctx->outer)
2733 	switch (gimple_code (ctx->stmt))
2734 	  {
2735 	  case GIMPLE_OMP_FOR:
2736 	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2737 		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2738 	      break;
2739 	    /* FALLTHRU */
2740 	  case GIMPLE_OMP_SECTIONS:
2741 	  case GIMPLE_OMP_SINGLE:
2742 	  case GIMPLE_OMP_ORDERED:
2743 	  case GIMPLE_OMP_MASTER:
2744 	  case GIMPLE_OMP_TASK:
2745 	  case GIMPLE_OMP_CRITICAL:
2746 	    if (is_gimple_call (stmt))
2747 	      {
2748 		if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2749 		    != BUILT_IN_GOMP_BARRIER)
2750 		  return true;
2751 		error_at (gimple_location (stmt),
2752 			  "barrier region may not be closely nested inside "
2753 			  "of work-sharing, %<critical%>, %<ordered%>, "
2754 			  "%<master%>, explicit %<task%> or %<taskloop%> "
2755 			  "region");
2756 		return false;
2757 	      }
2758 	    error_at (gimple_location (stmt),
2759 		      "work-sharing region may not be closely nested inside "
2760 		      "of work-sharing, %<critical%>, %<ordered%>, "
2761 		      "%<master%>, explicit %<task%> or %<taskloop%> region");
2762 	    return false;
2763 	  case GIMPLE_OMP_PARALLEL:
2764 	  case GIMPLE_OMP_TEAMS:
2765 	    return true;
2766 	  case GIMPLE_OMP_TARGET:
2767 	    if (gimple_omp_target_kind (ctx->stmt)
2768 		== GF_OMP_TARGET_KIND_REGION)
2769 	      return true;
2770 	    break;
2771 	  default:
2772 	    break;
2773 	  }
2774       break;
2775     case GIMPLE_OMP_MASTER:
2776       for (; ctx != NULL; ctx = ctx->outer)
2777 	switch (gimple_code (ctx->stmt))
2778 	  {
2779 	  case GIMPLE_OMP_FOR:
2780 	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2781 		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2782 	      break;
2783 	    /* FALLTHRU */
2784 	  case GIMPLE_OMP_SECTIONS:
2785 	  case GIMPLE_OMP_SINGLE:
2786 	  case GIMPLE_OMP_TASK:
2787 	    error_at (gimple_location (stmt),
2788 		      "%<master%> region may not be closely nested inside "
2789 		      "of work-sharing, explicit %<task%> or %<taskloop%> "
2790 		      "region");
2791 	    return false;
2792 	  case GIMPLE_OMP_PARALLEL:
2793 	  case GIMPLE_OMP_TEAMS:
2794 	    return true;
2795 	  case GIMPLE_OMP_TARGET:
2796 	    if (gimple_omp_target_kind (ctx->stmt)
2797 		== GF_OMP_TARGET_KIND_REGION)
2798 	      return true;
2799 	    break;
2800 	  default:
2801 	    break;
2802 	  }
2803       break;
2804     case GIMPLE_OMP_TASK:
2805       for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2806 	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2807 	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2808 		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2809 	  {
2810 	    enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2811 	    error_at (OMP_CLAUSE_LOCATION (c),
2812 		      "%<depend(%s)%> is only allowed in %<omp ordered%>",
2813 		      kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2814 	    return false;
2815 	  }
2816       break;
2817     case GIMPLE_OMP_ORDERED:
2818       for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2819 	   c; c = OMP_CLAUSE_CHAIN (c))
2820 	{
2821 	  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
2822 	    {
2823 	      gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
2824 			  || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
2825 	      continue;
2826 	    }
2827 	  enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2828 	  if (kind == OMP_CLAUSE_DEPEND_SOURCE
2829 	      || kind == OMP_CLAUSE_DEPEND_SINK)
2830 	    {
2831 	      tree oclause;
2832 	      /* Look for containing ordered(N) loop.  */
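	      /* I.e. (illustrative) the accepted shape is:

		     #pragma omp for ordered(1)
		     for (i = 1; i < n; i++)
		       {
			 #pragma omp ordered depend(sink: i - 1)
			 ...
			 #pragma omp ordered depend(source)
		       }  */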
2833 	      if (ctx == NULL
2834 		  || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2835 		  || (oclause
2836 			= omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2837 					   OMP_CLAUSE_ORDERED)) == NULL_TREE)
2838 		{
2839 		  error_at (OMP_CLAUSE_LOCATION (c),
2840 			    "%<ordered%> construct with %<depend%> clause "
2841 			    "must be closely nested inside an %<ordered%> "
2842 			    "loop");
2843 		  return false;
2844 		}
2845 	      else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
2846 		{
2847 		  error_at (OMP_CLAUSE_LOCATION (c),
2848 			    "%<ordered%> construct with %<depend%> clause "
2849 			    "must be closely nested inside a loop with "
2850 			    "%<ordered%> clause with a parameter");
2851 		  return false;
2852 		}
2853 	    }
2854 	  else
2855 	    {
2856 	      error_at (OMP_CLAUSE_LOCATION (c),
2857 			"invalid depend kind in omp %<ordered%> %<depend%>");
2858 	      return false;
2859 	    }
2860 	}
2861       c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2862       if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2863 	{
2864 	  /* ordered simd must be closely nested inside of simd region,
2865 	     and simd region must not encounter constructs other than
2866 	     ordered simd, therefore ordered simd may be either orphaned,
2867 	     or ctx->stmt must be simd.  The latter case was already handled
2868 	     earlier.  */
2869 	  if (ctx != NULL)
2870 	    {
2871 	      error_at (gimple_location (stmt),
2872 			"%<ordered%> %<simd%> must be closely nested inside "
2873 			"%<simd%> region");
2874 	      return false;
2875 	    }
2876 	}
2877       for (; ctx != NULL; ctx = ctx->outer)
2878 	switch (gimple_code (ctx->stmt))
2879 	  {
2880 	  case GIMPLE_OMP_CRITICAL:
2881 	  case GIMPLE_OMP_TASK:
2882 	  case GIMPLE_OMP_ORDERED:
2883 	  ordered_in_taskloop:
2884 	    error_at (gimple_location (stmt),
2885 		      "%<ordered%> region may not be closely nested inside "
2886 		      "of %<critical%>, %<ordered%>, explicit %<task%> or "
2887 		      "%<taskloop%> region");
2888 	    return false;
2889 	  case GIMPLE_OMP_FOR:
2890 	    if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2891 	      goto ordered_in_taskloop;
2892 	    tree o;
2893 	    o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2894 				 OMP_CLAUSE_ORDERED);
2895 	    if (o == NULL)
2896 	      {
2897 		error_at (gimple_location (stmt),
2898 			  "%<ordered%> region must be closely nested inside "
2899 			  "a loop region with an %<ordered%> clause");
2900 		return false;
2901 	      }
2902 	    if (OMP_CLAUSE_ORDERED_EXPR (o) != NULL_TREE
2903 		&& omp_find_clause (c, OMP_CLAUSE_DEPEND) == NULL_TREE)
2904 	      {
2905 		error_at (gimple_location (stmt),
2906 			  "%<ordered%> region without %<depend%> clause may "
2907 			  "not be closely nested inside a loop region with "
2908 			  "an %<ordered%> clause with a parameter");
2909 		return false;
2910 	      }
2911 	    return true;
2912 	  case GIMPLE_OMP_TARGET:
2913 	    if (gimple_omp_target_kind (ctx->stmt)
2914 		!= GF_OMP_TARGET_KIND_REGION)
2915 	      break;
2916 	    /* FALLTHRU */
2917 	  case GIMPLE_OMP_PARALLEL:
2918 	  case GIMPLE_OMP_TEAMS:
2919 	    error_at (gimple_location (stmt),
2920 		      "%<ordered%> region must be closely nested inside "
2921 		      "a loop region with an %<ordered%> clause");
2922 	    return false;
2923 	  default:
2924 	    break;
2925 	  }
2926       break;
2927     case GIMPLE_OMP_CRITICAL:
2928       {
2929 	tree this_stmt_name
2930 	  = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
2931 	for (; ctx != NULL; ctx = ctx->outer)
2932 	  if (gomp_critical *other_crit
2933 	        = dyn_cast <gomp_critical *> (ctx->stmt))
2934 	    if (this_stmt_name == gimple_omp_critical_name (other_crit))
2935 	      {
2936 		error_at (gimple_location (stmt),
2937 			  "%<critical%> region may not be nested inside "
2938 			   "a %<critical%> region with the same name");
2939 		return false;
2940 	      }
2941       }
2942       break;
2943     case GIMPLE_OMP_TEAMS:
2944       if (ctx == NULL
2945 	  || gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
2946 	  || gimple_omp_target_kind (ctx->stmt) != GF_OMP_TARGET_KIND_REGION)
2947 	{
2948 	  error_at (gimple_location (stmt),
2949 		    "%<teams%> construct not closely nested inside of "
2950 		    "%<target%> construct");
2951 	  return false;
2952 	}
2953       break;
2954     case GIMPLE_OMP_TARGET:
2955       for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2956 	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2957 	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2958 		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2959 	  {
2960 	    enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2961 	    error_at (OMP_CLAUSE_LOCATION (c),
2962 		      "%<depend(%s)%> is only allowed in %<omp ordered%>",
2963 		      kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2964 	    return false;
2965 	  }
2966       if (is_gimple_omp_offloaded (stmt)
2967 	  && oacc_get_fn_attrib (cfun->decl) != NULL)
2968 	{
2969 	  error_at (gimple_location (stmt),
2970 		    "OpenACC region inside of OpenACC routine, nested "
2971 		    "parallelism not supported yet");
2972 	  return false;
2973 	}
2974       for (; ctx != NULL; ctx = ctx->outer)
2975 	{
2976 	  if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
2977 	    {
2978 	      if (is_gimple_omp (stmt)
2979 		  && is_gimple_omp_oacc (stmt)
2980 		  && is_gimple_omp (ctx->stmt))
2981 		{
2982 		  error_at (gimple_location (stmt),
2983 			    "OpenACC construct inside of non-OpenACC region");
2984 		  return false;
2985 		}
2986 	      continue;
2987 	    }
2988 
2989 	  const char *stmt_name, *ctx_stmt_name;
2990 	  switch (gimple_omp_target_kind (stmt))
2991 	    {
2992 	    case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
2993 	    case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
2994 	    case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
2995 	    case GF_OMP_TARGET_KIND_ENTER_DATA:
2996 	      stmt_name = "target enter data"; break;
2997 	    case GF_OMP_TARGET_KIND_EXIT_DATA:
2998 	      stmt_name = "target exit data"; break;
2999 	    case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
3000 	    case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
3001 	    case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
3002 	    case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
3003 	    case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
3004 	      stmt_name = "enter/exit data"; break;
3005 	    case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
3006 	      break;
3007 	    default: gcc_unreachable ();
3008 	    }
3009 	  switch (gimple_omp_target_kind (ctx->stmt))
3010 	    {
3011 	    case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
3012 	    case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
3013 	    case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3014 	      ctx_stmt_name = "parallel"; break;
3015 	    case GF_OMP_TARGET_KIND_OACC_KERNELS:
3016 	      ctx_stmt_name = "kernels"; break;
3017 	    case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
3018 	    case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
3019 	      ctx_stmt_name = "host_data"; break;
3020 	    default: gcc_unreachable ();
3021 	    }
3022 
3023 	  /* OpenACC/OpenMP mismatch?  */
3024 	  if (is_gimple_omp_oacc (stmt)
3025 	      != is_gimple_omp_oacc (ctx->stmt))
3026 	    {
3027 	      error_at (gimple_location (stmt),
3028 			"%s %qs construct inside of %s %qs region",
3029 			(is_gimple_omp_oacc (stmt)
3030 			 ? "OpenACC" : "OpenMP"), stmt_name,
3031 			(is_gimple_omp_oacc (ctx->stmt)
3032 			 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
3033 	      return false;
3034 	    }
3035 	  if (is_gimple_omp_offloaded (ctx->stmt))
3036 	    {
3037 	      /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX.  */
3038 	      if (is_gimple_omp_oacc (ctx->stmt))
3039 		{
3040 		  error_at (gimple_location (stmt),
3041 			    "%qs construct inside of %qs region",
3042 			    stmt_name, ctx_stmt_name);
3043 		  return false;
3044 		}
3045 	      else
3046 		{
3047 		  warning_at (gimple_location (stmt), 0,
3048 			      "%qs construct inside of %qs region",
3049 			      stmt_name, ctx_stmt_name);
3050 		}
3051 	    }
3052 	}
3053       break;
3054     default:
3055       break;
3056     }
3057   return true;
3058 }
3059 
3060 
3061 /* Helper function for scan_omp.
3062 
3063    Callback for walk_tree or for operands in walk_gimple_stmt, used
3064    to scan for OMP directives in TP.  */
3065 
3066 static tree
3067 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3068 {
3069   struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3070   omp_context *ctx = (omp_context *) wi->info;
3071   tree t = *tp;
3072 
3073   switch (TREE_CODE (t))
3074     {
3075     case VAR_DECL:
3076     case PARM_DECL:
3077     case LABEL_DECL:
3078     case RESULT_DECL:
3079       if (ctx)
3080 	{
3081 	  tree repl = remap_decl (t, &ctx->cb);
3082 	  gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3083 	  *tp = repl;
3084 	}
3085       break;
3086 
3087     default:
3088       if (ctx && TYPE_P (t))
3089 	*tp = remap_type (t, &ctx->cb);
3090       else if (!DECL_P (t))
3091 	{
3092 	  *walk_subtrees = 1;
3093 	  if (ctx)
3094 	    {
3095 	      tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3096 	      if (tem != TREE_TYPE (t))
3097 		{
3098 		  if (TREE_CODE (t) == INTEGER_CST)
3099 		    *tp = wide_int_to_tree (tem, t);
3100 		  else
3101 		    TREE_TYPE (t) = tem;
3102 		}
3103 	    }
3104 	}
3105       break;
3106     }
3107 
3108   return NULL_TREE;
3109 }
3110 
3111 /* Return true if FNDECL is a setjmp or a longjmp.  */
3112 
3113 static bool
3114 setjmp_or_longjmp_p (const_tree fndecl)
3115 {
3116   if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
3117       && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SETJMP
3118 	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LONGJMP))
3119     return true;
3120 
3121   tree declname = DECL_NAME (fndecl);
3122   if (!declname)
3123     return false;
3124   const char *name = IDENTIFIER_POINTER (declname);
3125   return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3126 }
3127 
3128 
3129 /* Helper function for scan_omp.
3130 
3131    Callback for walk_gimple_stmt used to scan for OMP directives in
3132    the current statement in GSI.  */
3133 
3134 static tree
3135 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3136 		 struct walk_stmt_info *wi)
3137 {
3138   gimple *stmt = gsi_stmt (*gsi);
3139   omp_context *ctx = (omp_context *) wi->info;
3140 
3141   if (gimple_has_location (stmt))
3142     input_location = gimple_location (stmt);
3143 
3144   /* Check the nesting restrictions.  */
3145   bool remove = false;
3146   if (is_gimple_omp (stmt))
3147     remove = !check_omp_nesting_restrictions (stmt, ctx);
3148   else if (is_gimple_call (stmt))
3149     {
3150       tree fndecl = gimple_call_fndecl (stmt);
3151       if (fndecl)
3152 	{
3153 	  if (setjmp_or_longjmp_p (fndecl)
3154 	      && ctx
3155 	      && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3156 	      && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
3157 	    {
3158 	      remove = true;
3159 	      error_at (gimple_location (stmt),
3160 			"setjmp/longjmp inside simd construct");
3161 	    }
3162 	  else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3163 	    switch (DECL_FUNCTION_CODE (fndecl))
3164 	      {
3165 	      case BUILT_IN_GOMP_BARRIER:
3166 	      case BUILT_IN_GOMP_CANCEL:
3167 	      case BUILT_IN_GOMP_CANCELLATION_POINT:
3168 	      case BUILT_IN_GOMP_TASKYIELD:
3169 	      case BUILT_IN_GOMP_TASKWAIT:
3170 	      case BUILT_IN_GOMP_TASKGROUP_START:
3171 	      case BUILT_IN_GOMP_TASKGROUP_END:
3172 		remove = !check_omp_nesting_restrictions (stmt, ctx);
3173 		break;
3174 	      default:
3175 		break;
3176 	      }
3177 	}
3178     }
3179   if (remove)
3180     {
3181       stmt = gimple_build_nop ();
3182       gsi_replace (gsi, stmt, false);
3183     }
3184 
3185   *handled_ops_p = true;
3186 
3187   switch (gimple_code (stmt))
3188     {
3189     case GIMPLE_OMP_PARALLEL:
3190       taskreg_nesting_level++;
3191       scan_omp_parallel (gsi, ctx);
3192       taskreg_nesting_level--;
3193       break;
3194 
3195     case GIMPLE_OMP_TASK:
3196       taskreg_nesting_level++;
3197       scan_omp_task (gsi, ctx);
3198       taskreg_nesting_level--;
3199       break;
3200 
3201     case GIMPLE_OMP_FOR:
3202       if (((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3203 	    & GF_OMP_FOR_KIND_MASK) == GF_OMP_FOR_KIND_SIMD)
3204 	  && omp_maybe_offloaded_ctx (ctx)
3205 	  && omp_max_simt_vf ())
3206 	scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
3207       else
3208 	scan_omp_for (as_a <gomp_for *> (stmt), ctx);
3209       break;
3210 
3211     case GIMPLE_OMP_SECTIONS:
3212       scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
3213       break;
3214 
3215     case GIMPLE_OMP_SINGLE:
3216       scan_omp_single (as_a <gomp_single *> (stmt), ctx);
3217       break;
3218 
3219     case GIMPLE_OMP_SECTION:
3220     case GIMPLE_OMP_MASTER:
3221     case GIMPLE_OMP_TASKGROUP:
3222     case GIMPLE_OMP_ORDERED:
3223     case GIMPLE_OMP_CRITICAL:
3224     case GIMPLE_OMP_GRID_BODY:
3225       ctx = new_omp_context (stmt, ctx);
3226       scan_omp (gimple_omp_body_ptr (stmt), ctx);
3227       break;
3228 
3229     case GIMPLE_OMP_TARGET:
3230       scan_omp_target (as_a <gomp_target *> (stmt), ctx);
3231       break;
3232 
3233     case GIMPLE_OMP_TEAMS:
3234       scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3235       break;
3236 
3237     case GIMPLE_BIND:
3238       {
3239 	tree var;
3240 
3241 	*handled_ops_p = false;
3242 	if (ctx)
3243 	  for (var = gimple_bind_vars (as_a <gbind *> (stmt));
3244 	       var ;
3245 	       var = DECL_CHAIN (var))
3246 	    insert_decl_map (&ctx->cb, var, var);
3247       }
3248       break;
3249     default:
3250       *handled_ops_p = false;
3251       break;
3252     }
3253 
3254   return NULL_TREE;
3255 }
3256 
3257 
3258 /* Scan all the statements starting at the current statement.  CTX
3259    contains context information about the OMP directives and
3260    clauses found during the scan.  */
3261 
3262 static void
3263 scan_omp (gimple_seq *body_p, omp_context *ctx)
3264 {
3265   location_t saved_location;
3266   struct walk_stmt_info wi;
3267 
3268   memset (&wi, 0, sizeof (wi));
3269   wi.info = ctx;
3270   wi.want_locations = true;
3271 
3272   saved_location = input_location;
3273   walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
3274   input_location = saved_location;
3275 }
3276 
3277 /* Re-gimplification and code generation routines.  */
3278 
3279 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
3280    of BIND if in a method.  */
3281 
3282 static void
3283 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
3284 {
3285   if (DECL_ARGUMENTS (current_function_decl)
3286       && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
3287       && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
3288 	  == POINTER_TYPE))
3289     {
3290       tree vars = gimple_bind_vars (bind);
3291       for (tree *pvar = &vars; *pvar; )
3292 	if (omp_member_access_dummy_var (*pvar))
3293 	  *pvar = DECL_CHAIN (*pvar);
3294 	else
3295 	  pvar = &DECL_CHAIN (*pvar);
3296       gimple_bind_set_vars (bind, vars);
3297     }
3298 }
3299 
3300 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
3301    block and its subblocks.  */
3302 
3303 static void
3304 remove_member_access_dummy_vars (tree block)
3305 {
3306   for (tree *pvar = &BLOCK_VARS (block); *pvar; )
3307     if (omp_member_access_dummy_var (*pvar))
3308       *pvar = DECL_CHAIN (*pvar);
3309     else
3310       pvar = &DECL_CHAIN (*pvar);
3311 
3312   for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
3313     remove_member_access_dummy_vars (block);
3314 }
3315 
3316 /* If a context was created for STMT when it was scanned, return it.  */
3317 
3318 static omp_context *
3319 maybe_lookup_ctx (gimple *stmt)
3320 {
3321   splay_tree_node n;
3322   n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3323   return n ? (omp_context *) n->value : NULL;
3324 }
3325 
3326 
3327 /* Find the mapping for DECL in CTX or the immediately enclosing
3328    context that has a mapping for DECL.
3329 
3330    If CTX is a nested parallel directive, we may have to use the decl
3331    mappings created in CTX's parent context.  Suppose that we have the
3332    following parallel nesting (variable UIDs shown for clarity):
3333 
3334 	iD.1562 = 0;
3335      	#omp parallel shared(iD.1562)		-> outer parallel
3336 	  iD.1562 = iD.1562 + 1;
3337 
3338 	  #omp parallel shared (iD.1562)	-> inner parallel
3339 	     iD.1562 = iD.1562 - 1;
3340 
3341    Each parallel structure will create a distinct .omp_data_s structure
3342    for copying iD.1562 in/out of the directive:
3343 
3344   	outer parallel		.omp_data_s.1.i -> iD.1562
3345 	inner parallel		.omp_data_s.2.i -> iD.1562
3346 
3347    A shared variable mapping will produce a copy-out operation before
3348    the parallel directive and a copy-in operation after it.  So, in
3349    this case we would have:
3350 
3351   	iD.1562 = 0;
3352 	.omp_data_o.1.i = iD.1562;
3353 	#omp parallel shared(iD.1562)		-> outer parallel
3354 	  .omp_data_i.1 = &.omp_data_o.1
3355 	  .omp_data_i.1->i = .omp_data_i.1->i + 1;
3356 
3357 	  .omp_data_o.2.i = iD.1562;		-> **
3358 	  #omp parallel shared(iD.1562)		-> inner parallel
3359 	    .omp_data_i.2 = &.omp_data_o.2
3360 	    .omp_data_i.2->i = .omp_data_i.2->i - 1;
3361 
3362 
3363     ** This is a problem.  The symbol iD.1562 cannot be referenced
3364        inside the body of the outer parallel region.  But since we are
3365        emitting this copy operation while expanding the inner parallel
3366        directive, we need to access the CTX structure of the outer
3367        parallel directive to get the correct mapping:
3368 
3369 	  .omp_data_o.2.i = .omp_data_i.1->i
3370 
3371     Since there may be other workshare or parallel directives enclosing
3372     the parallel directive, it may be necessary to walk up the context
3373     parent chain.  This is not a problem in general because nested
3374     parallelism happens only rarely.  */
3375 
3376 static tree
3377 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3378 {
3379   tree t;
3380   omp_context *up;
3381 
3382   for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3383     t = maybe_lookup_decl (decl, up);
3384 
3385   gcc_assert (!ctx->is_nested || t || is_global_var (decl));
3386 
3387   return t ? t : decl;
3388 }
3389 
3390 
3391 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3392    in outer contexts.  */
3393 
3394 static tree
3395 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3396 {
3397   tree t = NULL;
3398   omp_context *up;
3399 
3400   for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3401     t = maybe_lookup_decl (decl, up);
3402 
3403   return t ? t : decl;
3404 }
3405 
3406 
3407 /* Construct the initialization value for reduction operation OP.  */
3408 
3409 tree
3410 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
3411 {
3412   switch (op)
3413     {
3414     case PLUS_EXPR:
3415     case MINUS_EXPR:
3416     case BIT_IOR_EXPR:
3417     case BIT_XOR_EXPR:
3418     case TRUTH_OR_EXPR:
3419     case TRUTH_ORIF_EXPR:
3420     case TRUTH_XOR_EXPR:
3421     case NE_EXPR:
3422       return build_zero_cst (type);
3423 
3424     case MULT_EXPR:
3425     case TRUTH_AND_EXPR:
3426     case TRUTH_ANDIF_EXPR:
3427     case EQ_EXPR:
3428       return fold_convert_loc (loc, type, integer_one_node);
3429 
3430     case BIT_AND_EXPR:
3431       return fold_convert_loc (loc, type, integer_minus_one_node);
3432 
3433     case MAX_EXPR:
3434       if (SCALAR_FLOAT_TYPE_P (type))
3435 	{
3436 	  REAL_VALUE_TYPE max, min;
3437 	  if (HONOR_INFINITIES (type))
3438 	    {
3439 	      real_inf (&max);
3440 	      real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
3441 	    }
3442 	  else
3443 	    real_maxval (&min, 1, TYPE_MODE (type));
3444 	  return build_real (type, min);
3445 	}
3446       else if (POINTER_TYPE_P (type))
3447 	{
3448 	  wide_int min
3449 	    = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3450 	  return wide_int_to_tree (type, min);
3451 	}
3452       else
3453 	{
3454 	  gcc_assert (INTEGRAL_TYPE_P (type));
3455 	  return TYPE_MIN_VALUE (type);
3456 	}
3457 
3458     case MIN_EXPR:
3459       if (SCALAR_FLOAT_TYPE_P (type))
3460 	{
3461 	  REAL_VALUE_TYPE max;
3462 	  if (HONOR_INFINITIES (type))
3463 	    real_inf (&max);
3464 	  else
3465 	    real_maxval (&max, 0, TYPE_MODE (type));
3466 	  return build_real (type, max);
3467 	}
3468       else if (POINTER_TYPE_P (type))
3469 	{
3470 	  wide_int max
3471 	    = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3472 	  return wide_int_to_tree (type, max);
3473 	}
3474       else
3475 	{
3476 	  gcc_assert (INTEGRAL_TYPE_P (type));
3477 	  return TYPE_MAX_VALUE (type);
3478 	}
3479 
3480     default:
3481       gcc_unreachable ();
3482     }
3483 }
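
/* Editorial sketch: at the source level, the identities chosen above mean
   that for, e.g.,

     int m = INT_MIN;
     #pragma omp parallel for reduction (max:m)
     for (int i = 0; i < n; i++)
       if (a[i] > m)
	 m = a[i];

   each thread's private copy of M starts at the MAX_EXPR identity
   TYPE_MIN_VALUE (INT_MIN here), so combining the per-thread partial
   results with MAX matches the serial loop.  */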
3484 
3485 /* Construct the initialization value for reduction CLAUSE.  */
3486 
3487 tree
3488 omp_reduction_init (tree clause, tree type)
3489 {
3490   return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
3491 				OMP_CLAUSE_REDUCTION_CODE (clause), type);
3492 }
3493 
3494 /* Return alignment to be assumed for var in CLAUSE, which should be
3495    OMP_CLAUSE_ALIGNED.  */
3496 
3497 static tree
3498 omp_clause_aligned_alignment (tree clause)
3499 {
3500   if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
3501     return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
3502 
3503   /* Otherwise return implementation defined alignment.  */
3504   unsigned int al = 1;
3505   machine_mode mode, vmode;
3506   int vs = targetm.vectorize.autovectorize_vector_sizes ();
3507   if (vs)
3508     vs = 1 << floor_log2 (vs);
3509   static enum mode_class classes[]
3510     = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
3511   for (int i = 0; i < 4; i += 2)
3512     for (mode = GET_CLASS_NARROWEST_MODE (classes[i]);
3513 	 mode != VOIDmode;
3514 	 mode = GET_MODE_WIDER_MODE (mode))
3515       {
3516 	vmode = targetm.vectorize.preferred_simd_mode (mode);
3517 	if (GET_MODE_CLASS (vmode) != classes[i + 1])
3518 	  continue;
3519 	while (vs
3520 	       && GET_MODE_SIZE (vmode) < vs
3521 	       && GET_MODE_2XWIDER_MODE (vmode) != VOIDmode)
3522 	  vmode = GET_MODE_2XWIDER_MODE (vmode);
3523 
3524 	tree type = lang_hooks.types.type_for_mode (mode, 1);
3525 	if (type == NULL_TREE || TYPE_MODE (type) != mode)
3526 	  continue;
3527 	type = build_vector_type (type, GET_MODE_SIZE (vmode)
3528 					/ GET_MODE_SIZE (mode));
3529 	if (TYPE_MODE (type) != vmode)
3530 	  continue;
3531 	if (TYPE_ALIGN_UNIT (type) > al)
3532 	  al = TYPE_ALIGN_UNIT (type);
3533       }
3534   return build_int_cst (integer_type_node, al);
3535 }
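
/* Editorial sketch: the alignment computed above is consumed by the
   OMP_CLAUSE_ALIGNED handling in lower_rec_input_clauses below, which
   behaves roughly like

     #pragma omp simd aligned (p)
       =>  p = (T *) __builtin_assume_aligned (p, al);

   where AL is the user-supplied alignment if given, or else the widest
   preferred SIMD vector alignment found by the loop above.  */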
3536 
3537 
3538 /* This structure is part of the interface between lower_rec_simd_input_clauses
3539    and lower_rec_input_clauses.  */
3540 
3541 struct omplow_simd_context {
3542   tree idx;
3543   tree lane;
3544   vec<tree, va_heap> simt_eargs;
3545   gimple_seq simt_dlist;
3546   int max_vf;
3547   bool is_simt;
3548 };
3549 
3550 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
3551    privatization.  */
3552 
3553 static bool
3554 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
3555 			      omplow_simd_context *sctx, tree &ivar, tree &lvar)
3556 {
3557   if (sctx->max_vf == 0)
3558     {
3559       sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
3560       if (sctx->max_vf > 1)
3561 	{
3562 	  tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3563 				    OMP_CLAUSE_SAFELEN);
3564 	  if (c
3565 	      && (TREE_CODE (OMP_CLAUSE_SAFELEN_EXPR (c)) != INTEGER_CST
3566 		  || tree_int_cst_sgn (OMP_CLAUSE_SAFELEN_EXPR (c)) != 1))
3567 	    sctx->max_vf = 1;
3568 	  else if (c && compare_tree_int (OMP_CLAUSE_SAFELEN_EXPR (c),
3569 					  sctx->max_vf) == -1)
3570 	    sctx->max_vf = tree_to_shwi (OMP_CLAUSE_SAFELEN_EXPR (c));
3571 	}
3572       if (sctx->max_vf > 1)
3573 	{
3574 	  sctx->idx = create_tmp_var (unsigned_type_node);
3575 	  sctx->lane = create_tmp_var (unsigned_type_node);
3576 	}
3577     }
3578   if (sctx->max_vf == 1)
3579     return false;
3580 
3581   if (sctx->is_simt)
3582     {
3583       if (is_gimple_reg (new_var))
3584 	{
3585 	  ivar = lvar = new_var;
3586 	  return true;
3587 	}
3588       tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
3589       ivar = lvar = create_tmp_var (type);
3590       TREE_ADDRESSABLE (ivar) = 1;
3591       DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
3592 					  NULL, DECL_ATTRIBUTES (ivar));
3593       sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
3594       tree clobber = build_constructor (type, NULL);
3595       TREE_THIS_VOLATILE (clobber) = 1;
3596       gimple *g = gimple_build_assign (ivar, clobber);
3597       gimple_seq_add_stmt (&sctx->simt_dlist, g);
3598     }
3599   else
3600     {
3601       tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
3602       tree avar = create_tmp_var_raw (atype);
3603       if (TREE_ADDRESSABLE (new_var))
3604 	TREE_ADDRESSABLE (avar) = 1;
3605       DECL_ATTRIBUTES (avar)
3606 	= tree_cons (get_identifier ("omp simd array"), NULL,
3607 		     DECL_ATTRIBUTES (avar));
3608       gimple_add_tmp_var (avar);
3609       ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->idx,
3610 		     NULL_TREE, NULL_TREE);
3611       lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
3612 		     NULL_TREE, NULL_TREE);
3613     }
3614   if (DECL_P (new_var))
3615     {
3616       SET_DECL_VALUE_EXPR (new_var, lvar);
3617       DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3618     }
3619   return true;
3620 }
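
/* Editorial sketch of the non-SIMT branch above: a private scalar X in a
   simd loop is backed by a per-lane "omp simd array",

     T D.avar[max_vf];		<- AVAR, with "omp simd array" attribute
     ... D.avar[idx] ...	<- IVAR, used inside the loop body
     ... D.avar[lane] ...	<- LVAR, installed as X's DECL_VALUE_EXPR

   so that after vectorization each SIMD lane owns one element.  D.avar is
   an illustrative name for the temporary created above.  */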
3621 
3622 /* Helper function of lower_rec_input_clauses.  For a reference in a
3623    simd reduction, add an underlying variable for it to reference.  */
3624 
3625 static void
3626 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
3627 {
3628   tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
3629   if (TREE_CONSTANT (z))
3630     {
3631       z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
3632 			      get_name (new_vard));
3633       gimple_add_tmp_var (z);
3634       TREE_ADDRESSABLE (z) = 1;
3635       z = build_fold_addr_expr_loc (loc, z);
3636       gimplify_assign (new_vard, z, ilist);
3637     }
3638 }
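
/* Editorial sketch: for a reference whose pointed-to type has a constant
   size, handle_simd_reference emits in effect

     T D.tmp;			<- fresh addressable backing storage
     new_vard = &D.tmp;		<- gimplified into *ILIST

   so the privatized reference has something to point at.  D.tmp is an
   illustrative name for the temporary created above.  */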
3639 
3640 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
3641    from the receiver (aka child) side and initializers for REFERENCE_TYPE
3642    private variables.  Initialization statements go in ILIST, while calls
3643    to destructors go in DLIST.  */
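
/* Editorial sketch: for

     #pragma omp parallel firstprivate (x)

   the receiver (child) side code generated here initializes the private
   copy from the sender record, roughly

     x = .omp_data_i->x;		<- appended to ILIST

   while any C++ destructor call for X is queued in DLIST.  */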
3644 
3645 static void
3646 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
3647 			 omp_context *ctx, struct omp_for_data *fd)
3648 {
3649   tree c, dtor, copyin_seq, x, ptr;
3650   bool copyin_by_ref = false;
3651   bool lastprivate_firstprivate = false;
3652   bool reduction_omp_orig_ref = false;
3653   int pass;
3654   bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3655 		  && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD);
3656   omplow_simd_context sctx = omplow_simd_context ();
3657   tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
3658   tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
3659   gimple_seq llist[3] = { };
3660 
3661   copyin_seq = NULL;
3662   sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
3663 
3664   /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3665      with data sharing clauses referencing variable sized vars.  That
3666      is unnecessarily hard to support and very unlikely to result in
3667      vectorized code anyway.  */
3668   if (is_simd)
3669     for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3670       switch (OMP_CLAUSE_CODE (c))
3671 	{
3672 	case OMP_CLAUSE_LINEAR:
3673 	  if (OMP_CLAUSE_LINEAR_ARRAY (c))
3674 	    sctx.max_vf = 1;
3675 	  /* FALLTHRU */
3676 	case OMP_CLAUSE_PRIVATE:
3677 	case OMP_CLAUSE_FIRSTPRIVATE:
3678 	case OMP_CLAUSE_LASTPRIVATE:
3679 	  if (is_variable_sized (OMP_CLAUSE_DECL (c)))
3680 	    sctx.max_vf = 1;
3681 	  break;
3682 	case OMP_CLAUSE_REDUCTION:
3683 	  if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
3684 	      || is_variable_sized (OMP_CLAUSE_DECL (c)))
3685 	    sctx.max_vf = 1;
3686 	  break;
3687 	default:
3688 	  continue;
3689 	}
3690 
3691   /* Add a placeholder for simduid.  */
3692   if (sctx.is_simt && sctx.max_vf != 1)
3693     sctx.simt_eargs.safe_push (NULL_TREE);
3694 
3695   /* Do all the fixed sized types in the first pass, and the variable sized
3696      types in the second pass.  This makes sure that the scalar arguments to
3697      the variable sized types are processed before we use them in the
3698      variable sized operations.  */
3699   for (pass = 0; pass < 2; ++pass)
3700     {
3701       for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3702 	{
3703 	  enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
3704 	  tree var, new_var;
3705 	  bool by_ref;
3706 	  location_t clause_loc = OMP_CLAUSE_LOCATION (c);
3707 
3708 	  switch (c_kind)
3709 	    {
3710 	    case OMP_CLAUSE_PRIVATE:
3711 	      if (OMP_CLAUSE_PRIVATE_DEBUG (c))
3712 		continue;
3713 	      break;
3714 	    case OMP_CLAUSE_SHARED:
3715 	      /* Ignore shared directives in a teams construct.  */
3716 	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3717 		continue;
3718 	      if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
3719 		{
3720 		  gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
3721 			      || is_global_var (OMP_CLAUSE_DECL (c)));
3722 		  continue;
3723 		}
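	      /* FALLTHRU */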
3724 	    case OMP_CLAUSE_FIRSTPRIVATE:
3725 	    case OMP_CLAUSE_COPYIN:
3726 	      break;
3727 	    case OMP_CLAUSE_LINEAR:
3728 	      if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
3729 		  && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
3730 		lastprivate_firstprivate = true;
3731 	      break;
3732 	    case OMP_CLAUSE_REDUCTION:
3733 	      if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3734 		reduction_omp_orig_ref = true;
3735 	      break;
3736 	    case OMP_CLAUSE__LOOPTEMP_:
3737 	      /* Handle _looptemp_ clauses only on parallel/task.  */
3738 	      if (fd)
3739 		continue;
3740 	      break;
3741 	    case OMP_CLAUSE_LASTPRIVATE:
3742 	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
3743 		{
3744 		  lastprivate_firstprivate = true;
3745 		  if (pass != 0 || is_taskloop_ctx (ctx))
3746 		    continue;
3747 		}
3748 	      /* Even without a corresponding firstprivate, if the
3749 		 decl is a Fortran allocatable, it needs an outer var
3750 		 reference.  */
3751 	      else if (pass == 0
3752 		       && lang_hooks.decls.omp_private_outer_ref
3753 							(OMP_CLAUSE_DECL (c)))
3754 		lastprivate_firstprivate = true;
3755 	      break;
3756 	    case OMP_CLAUSE_ALIGNED:
3757 	      if (pass == 0)
3758 		continue;
3759 	      var = OMP_CLAUSE_DECL (c);
3760 	      if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
3761 		  && !is_global_var (var))
3762 		{
3763 		  new_var = maybe_lookup_decl (var, ctx);
3764 		  if (new_var == NULL_TREE)
3765 		    new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
3766 		  x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3767 		  tree alarg = omp_clause_aligned_alignment (c);
3768 		  alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3769 		  x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
3770 		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3771 		  x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
3772 		  gimplify_and_add (x, ilist);
3773 		}
3774 	      else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
3775 		       && is_global_var (var))
3776 		{
3777 		  tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
3778 		  new_var = lookup_decl (var, ctx);
3779 		  t = maybe_lookup_decl_in_outer_ctx (var, ctx);
3780 		  t = build_fold_addr_expr_loc (clause_loc, t);
3781 		  t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3782 		  tree alarg = omp_clause_aligned_alignment (c);
3783 		  alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3784 		  t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
3785 		  t = fold_convert_loc (clause_loc, ptype, t);
3786 		  x = create_tmp_var (ptype);
3787 		  t = build2 (MODIFY_EXPR, ptype, x, t);
3788 		  gimplify_and_add (t, ilist);
3789 		  t = build_simple_mem_ref_loc (clause_loc, x);
3790 		  SET_DECL_VALUE_EXPR (new_var, t);
3791 		  DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3792 		}
3793 	      continue;
3794 	    default:
3795 	      continue;
3796 	    }
3797 
3798 	  new_var = var = OMP_CLAUSE_DECL (c);
3799 	  if (c_kind == OMP_CLAUSE_REDUCTION && TREE_CODE (var) == MEM_REF)
3800 	    {
3801 	      var = TREE_OPERAND (var, 0);
3802 	      if (TREE_CODE (var) == POINTER_PLUS_EXPR)
3803 		var = TREE_OPERAND (var, 0);
3804 	      if (TREE_CODE (var) == INDIRECT_REF
3805 		  || TREE_CODE (var) == ADDR_EXPR)
3806 		var = TREE_OPERAND (var, 0);
3807 	      if (is_variable_sized (var))
3808 		{
3809 		  gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
3810 		  var = DECL_VALUE_EXPR (var);
3811 		  gcc_assert (TREE_CODE (var) == INDIRECT_REF);
3812 		  var = TREE_OPERAND (var, 0);
3813 		  gcc_assert (DECL_P (var));
3814 		}
3815 	      new_var = var;
3816 	    }
3817 	  if (c_kind != OMP_CLAUSE_COPYIN)
3818 	    new_var = lookup_decl (var, ctx);
3819 
3820 	  if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
3821 	    {
3822 	      if (pass != 0)
3823 		continue;
3824 	    }
3825 	  /* C/C++ array section reductions.  */
3826 	  else if (c_kind == OMP_CLAUSE_REDUCTION
3827 		   && var != OMP_CLAUSE_DECL (c))
3828 	    {
3829 	      if (pass == 0)
3830 		continue;
3831 
3832 	      tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
3833 	      tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
3834 	      if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
3835 		{
3836 		  tree b = TREE_OPERAND (orig_var, 1);
3837 		  b = maybe_lookup_decl (b, ctx);
3838 		  if (b == NULL)
3839 		    {
3840 		      b = TREE_OPERAND (orig_var, 1);
3841 		      b = maybe_lookup_decl_in_outer_ctx (b, ctx);
3842 		    }
3843 		  if (integer_zerop (bias))
3844 		    bias = b;
3845 		  else
3846 		    {
3847 		      bias = fold_convert_loc (clause_loc,
3848 					       TREE_TYPE (b), bias);
3849 		      bias = fold_build2_loc (clause_loc, PLUS_EXPR,
3850 					      TREE_TYPE (b), b, bias);
3851 		    }
3852 		  orig_var = TREE_OPERAND (orig_var, 0);
3853 		}
3854 	      if (TREE_CODE (orig_var) == INDIRECT_REF
3855 		  || TREE_CODE (orig_var) == ADDR_EXPR)
3856 		orig_var = TREE_OPERAND (orig_var, 0);
3857 	      tree d = OMP_CLAUSE_DECL (c);
3858 	      tree type = TREE_TYPE (d);
3859 	      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
3860 	      tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
3861 	      const char *name = get_name (orig_var);
3862 	      if (TREE_CONSTANT (v))
3863 		{
3864 		  x = create_tmp_var_raw (type, name);
3865 		  gimple_add_tmp_var (x);
3866 		  TREE_ADDRESSABLE (x) = 1;
3867 		  x = build_fold_addr_expr_loc (clause_loc, x);
3868 		}
3869 	      else
3870 		{
3871 		  tree atmp
3872 		    = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
3873 		  tree t = maybe_lookup_decl (v, ctx);
3874 		  if (t)
3875 		    v = t;
3876 		  else
3877 		    v = maybe_lookup_decl_in_outer_ctx (v, ctx);
3878 		  gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
3879 		  t = fold_build2_loc (clause_loc, PLUS_EXPR,
3880 				       TREE_TYPE (v), v,
3881 				       build_int_cst (TREE_TYPE (v), 1));
3882 		  t = fold_build2_loc (clause_loc, MULT_EXPR,
3883 				       TREE_TYPE (v), t,
3884 				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
3885 		  tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
3886 		  x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
3887 		}
3888 
3889 	      tree ptype = build_pointer_type (TREE_TYPE (type));
3890 	      x = fold_convert_loc (clause_loc, ptype, x);
3891 	      tree y = create_tmp_var (ptype, name);
3892 	      gimplify_assign (y, x, ilist);
3893 	      x = y;
3894 	      tree yb = y;
3895 
3896 	      if (!integer_zerop (bias))
3897 		{
3898 		  bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
3899 					   bias);
3900 		  yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
3901 					 x);
3902 		  yb = fold_build2_loc (clause_loc, MINUS_EXPR,
3903 					pointer_sized_int_node, yb, bias);
3904 		  x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
3905 		  yb = create_tmp_var (ptype, name);
3906 		  gimplify_assign (yb, x, ilist);
3907 		  x = yb;
3908 		}
3909 
3910 	      d = TREE_OPERAND (d, 0);
3911 	      if (TREE_CODE (d) == POINTER_PLUS_EXPR)
3912 		d = TREE_OPERAND (d, 0);
3913 	      if (TREE_CODE (d) == ADDR_EXPR)
3914 		{
3915 		  if (orig_var != var)
3916 		    {
3917 		      gcc_assert (is_variable_sized (orig_var));
3918 		      x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
3919 					    x);
3920 		      gimplify_assign (new_var, x, ilist);
3921 		      tree new_orig_var = lookup_decl (orig_var, ctx);
3922 		      tree t = build_fold_indirect_ref (new_var);
3923 		      DECL_IGNORED_P (new_var) = 0;
3924 		      TREE_THIS_NOTRAP (t) = 1;
3925 		      SET_DECL_VALUE_EXPR (new_orig_var, t);
3926 		      DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
3927 		    }
3928 		  else
3929 		    {
3930 		      x = build2 (MEM_REF, TREE_TYPE (new_var), x,
3931 				  build_int_cst (ptype, 0));
3932 		      SET_DECL_VALUE_EXPR (new_var, x);
3933 		      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3934 		    }
3935 		}
3936 	      else
3937 		{
3938 		  gcc_assert (orig_var == var);
3939 		  if (TREE_CODE (d) == INDIRECT_REF)
3940 		    {
3941 		      x = create_tmp_var (ptype, name);
3942 		      TREE_ADDRESSABLE (x) = 1;
3943 		      gimplify_assign (x, yb, ilist);
3944 		      x = build_fold_addr_expr_loc (clause_loc, x);
3945 		    }
3946 		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3947 		  gimplify_assign (new_var, x, ilist);
3948 		}
3949 	      tree y1 = create_tmp_var (ptype, NULL);
3950 	      gimplify_assign (y1, y, ilist);
3951 	      tree i2 = NULL_TREE, y2 = NULL_TREE;
3952 	      tree body2 = NULL_TREE, end2 = NULL_TREE;
3953 	      tree y3 = NULL_TREE, y4 = NULL_TREE;
3954 	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
3955 		{
3956 		  y2 = create_tmp_var (ptype, NULL);
3957 		  gimplify_assign (y2, y, ilist);
3958 		  tree ref = build_outer_var_ref (var, ctx);
3959 		  /* For a reference, build_outer_var_ref already performs this.  */
3960 		  if (TREE_CODE (d) == INDIRECT_REF)
3961 		    gcc_assert (omp_is_reference (var));
3962 		  else if (TREE_CODE (d) == ADDR_EXPR)
3963 		    ref = build_fold_addr_expr (ref);
3964 		  else if (omp_is_reference (var))
3965 		    ref = build_fold_addr_expr (ref);
3966 		  ref = fold_convert_loc (clause_loc, ptype, ref);
3967 		  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
3968 		      && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3969 		    {
3970 		      y3 = create_tmp_var (ptype, NULL);
3971 		      gimplify_assign (y3, unshare_expr (ref), ilist);
3972 		    }
3973 		  if (is_simd)
3974 		    {
3975 		      y4 = create_tmp_var (ptype, NULL);
3976 		      gimplify_assign (y4, ref, dlist);
3977 		    }
3978 		}
3979 	      tree i = create_tmp_var (TREE_TYPE (v), NULL);
3980 	      gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
3981 	      tree body = create_artificial_label (UNKNOWN_LOCATION);
3982 	      tree end = create_artificial_label (UNKNOWN_LOCATION);
3983 	      gimple_seq_add_stmt (ilist, gimple_build_label (body));
3984 	      if (y2)
3985 		{
3986 		  i2 = create_tmp_var (TREE_TYPE (v), NULL);
3987 		  gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
3988 		  body2 = create_artificial_label (UNKNOWN_LOCATION);
3989 		  end2 = create_artificial_label (UNKNOWN_LOCATION);
3990 		  gimple_seq_add_stmt (dlist, gimple_build_label (body2));
3991 		}
3992 	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
3993 		{
3994 		  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
3995 		  tree decl_placeholder
3996 		    = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
3997 		  SET_DECL_VALUE_EXPR (decl_placeholder,
3998 				       build_simple_mem_ref (y1));
3999 		  DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
4000 		  SET_DECL_VALUE_EXPR (placeholder,
4001 				       y3 ? build_simple_mem_ref (y3)
4002 				       : error_mark_node);
4003 		  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4004 		  x = lang_hooks.decls.omp_clause_default_ctor
4005 				(c, build_simple_mem_ref (y1),
4006 				 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
4007 		  if (x)
4008 		    gimplify_and_add (x, ilist);
4009 		  if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4010 		    {
4011 		      gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4012 		      lower_omp (&tseq, ctx);
4013 		      gimple_seq_add_seq (ilist, tseq);
4014 		    }
4015 		  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4016 		  if (is_simd)
4017 		    {
4018 		      SET_DECL_VALUE_EXPR (decl_placeholder,
4019 					   build_simple_mem_ref (y2));
4020 		      SET_DECL_VALUE_EXPR (placeholder,
4021 					   build_simple_mem_ref (y4));
4022 		      gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4023 		      lower_omp (&tseq, ctx);
4024 		      gimple_seq_add_seq (dlist, tseq);
4025 		      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4026 		    }
4027 		  DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4028 		  DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
4029 		  x = lang_hooks.decls.omp_clause_dtor
4030 					(c, build_simple_mem_ref (y2));
4031 		  if (x)
4032 		    {
4033 		      gimple_seq tseq = NULL;
4034 		      dtor = x;
4035 		      gimplify_stmt (&dtor, &tseq);
4036 		      gimple_seq_add_seq (dlist, tseq);
4037 		    }
4038 		}
4039 	      else
4040 		{
4041 		  x = omp_reduction_init (c, TREE_TYPE (type));
4042 		  enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4043 
4044 		  /* reduction(-:var) sums up the partial results, so it
4045 		     acts identically to reduction(+:var).  */
4046 		  if (code == MINUS_EXPR)
4047 		    code = PLUS_EXPR;
4048 
4049 		  gimplify_assign (build_simple_mem_ref (y1), x, ilist);
4050 		  if (is_simd)
4051 		    {
4052 		      x = build2 (code, TREE_TYPE (type),
4053 				  build_simple_mem_ref (y4),
4054 				  build_simple_mem_ref (y2));
4055 		      gimplify_assign (build_simple_mem_ref (y4), x, dlist);
4056 		    }
4057 		}
4058 	      gimple *g
4059 		= gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
4060 				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
4061 	      gimple_seq_add_stmt (ilist, g);
4062 	      if (y3)
4063 		{
4064 		  g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
4065 					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
4066 		  gimple_seq_add_stmt (ilist, g);
4067 		}
4068 	      g = gimple_build_assign (i, PLUS_EXPR, i,
4069 				       build_int_cst (TREE_TYPE (i), 1));
4070 	      gimple_seq_add_stmt (ilist, g);
4071 	      g = gimple_build_cond (LE_EXPR, i, v, body, end);
4072 	      gimple_seq_add_stmt (ilist, g);
4073 	      gimple_seq_add_stmt (ilist, gimple_build_label (end));
4074 	      if (y2)
4075 		{
4076 		  g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
4077 					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
4078 		  gimple_seq_add_stmt (dlist, g);
4079 		  if (y4)
4080 		    {
4081 		      g = gimple_build_assign
4082 					(y4, POINTER_PLUS_EXPR, y4,
4083 					 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4084 		      gimple_seq_add_stmt (dlist, g);
4085 		    }
4086 		  g = gimple_build_assign (i2, PLUS_EXPR, i2,
4087 					   build_int_cst (TREE_TYPE (i2), 1));
4088 		  gimple_seq_add_stmt (dlist, g);
4089 		  g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
4090 		  gimple_seq_add_stmt (dlist, g);
4091 		  gimple_seq_add_stmt (dlist, gimple_build_label (end2));
4092 		}
4093 	      continue;
4094 	    }
4095 	  else if (is_variable_sized (var))
4096 	    {
4097 	      /* For variable sized types, we need to allocate the
4098 		 actual storage here.  Call alloca and store the
4099 		 result in the pointer decl that we created elsewhere.  */
4100 	      if (pass == 0)
4101 		continue;
4102 
4103 	      if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
4104 		{
4105 		  gcall *stmt;
4106 		  tree tmp, atmp;
4107 
4108 		  ptr = DECL_VALUE_EXPR (new_var);
4109 		  gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
4110 		  ptr = TREE_OPERAND (ptr, 0);
4111 		  gcc_assert (DECL_P (ptr));
4112 		  x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
4113 
4114 		  /* void *tmp = __builtin_alloca */
4115 		  atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4116 		  stmt = gimple_build_call (atmp, 2, x,
4117 					    size_int (DECL_ALIGN (var)));
4118 		  tmp = create_tmp_var_raw (ptr_type_node);
4119 		  gimple_add_tmp_var (tmp);
4120 		  gimple_call_set_lhs (stmt, tmp);
4121 
4122 		  gimple_seq_add_stmt (ilist, stmt);
4123 
4124 		  x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
4125 		  gimplify_assign (ptr, x, ilist);
4126 		}
4127 	    }
4128 	  else if (omp_is_reference (var))
4129 	    {
4130 	      /* For references that are being privatized for Fortran,
4131 		 allocate new backing storage for the new pointer
4132 		 variable.  This allows us to avoid changing all the
4133 		 code that expects a pointer to something that expects
4134 		 a direct variable.  */
4135 	      if (pass == 0)
4136 		continue;
4137 
4138 	      x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
4139 	      if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
4140 		{
4141 		  x = build_receiver_ref (var, false, ctx);
4142 		  x = build_fold_addr_expr_loc (clause_loc, x);
4143 		}
4144 	      else if (TREE_CONSTANT (x))
4145 		{
4146 		  /* For a reduction in a SIMD loop, defer adding the
4147 		     initialization of the reference, because if we decide
4148 		     to use a SIMD array for it, the initialization could
4149 		     cause an expansion ICE.  */
4150 		  if (c_kind == OMP_CLAUSE_REDUCTION && is_simd)
4151 		    x = NULL_TREE;
4152 		  else
4153 		    {
4154 		      x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
4155 					      get_name (var));
4156 		      gimple_add_tmp_var (x);
4157 		      TREE_ADDRESSABLE (x) = 1;
4158 		      x = build_fold_addr_expr_loc (clause_loc, x);
4159 		    }
4160 		}
4161 	      else
4162 		{
4163 		  tree atmp
4164 		    = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4165 		  tree rtype = TREE_TYPE (TREE_TYPE (new_var));
4166 		  tree al = size_int (TYPE_ALIGN (rtype));
4167 		  x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
4168 		}
4169 
4170 	      if (x)
4171 		{
4172 		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4173 		  gimplify_assign (new_var, x, ilist);
4174 		}
4175 
4176 	      new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4177 	    }
4178 	  else if (c_kind == OMP_CLAUSE_REDUCTION
4179 		   && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4180 	    {
4181 	      if (pass == 0)
4182 		continue;
4183 	    }
4184 	  else if (pass != 0)
4185 	    continue;
4186 
4187 	  switch (OMP_CLAUSE_CODE (c))
4188 	    {
4189 	    case OMP_CLAUSE_SHARED:
4190 	      /* Ignore shared directives in a teams construct.  */
4191 	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
4192 		continue;
4193 	      /* Shared global vars are just accessed directly.  */
4194 	      if (is_global_var (new_var))
4195 		break;
4196 	      /* For taskloop firstprivate/lastprivate, represented
4197 		 as firstprivate and shared clause on the task, new_var
4198 		 is the firstprivate var.  */
4199 	      if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
4200 		break;
4201 	      /* Set up the DECL_VALUE_EXPR for shared variables now.  This
4202 		 needs to be delayed until after fixup_child_record_type so
4203 		 that we get the correct type during the dereference.  */
4204 	      by_ref = use_pointer_for_field (var, ctx);
4205 	      x = build_receiver_ref (var, by_ref, ctx);
4206 	      SET_DECL_VALUE_EXPR (new_var, x);
4207 	      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4208 
4209 	      /* ??? If VAR is not passed by reference, and the variable
4210 		 hasn't been initialized yet, then we'll get a warning for
4211 		 the store into the omp_data_s structure.  Ideally, we'd be
4212 		 able to notice this and not store anything at all, but
4213 		 we're generating code too early.  Suppress the warning.  */
4214 	      if (!by_ref)
4215 		TREE_NO_WARNING (var) = 1;
4216 	      break;
4217 
4218 	    case OMP_CLAUSE_LASTPRIVATE:
4219 	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4220 		break;
4221 	      /* FALLTHRU */
4222 
4223 	    case OMP_CLAUSE_PRIVATE:
4224 	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
4225 		x = build_outer_var_ref (var, ctx);
4226 	      else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
4227 		{
4228 		  if (is_task_ctx (ctx))
4229 		    x = build_receiver_ref (var, false, ctx);
4230 		  else
4231 		    x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
4232 		}
4233 	      else
4234 		x = NULL;
4235 	    do_private:
4236 	      tree nx;
4237 	      nx = lang_hooks.decls.omp_clause_default_ctor
4238 						(c, unshare_expr (new_var), x);
4239 	      if (is_simd)
4240 		{
4241 		  tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
4242 		  if ((TREE_ADDRESSABLE (new_var) || nx || y
4243 		       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
4244 		      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4245 						       ivar, lvar))
4246 		    {
4247 		      if (nx)
4248 			x = lang_hooks.decls.omp_clause_default_ctor
4249 						(c, unshare_expr (ivar), x);
4250 		      if (nx && x)
4251 			gimplify_and_add (x, &llist[0]);
4252 		      if (y)
4253 			{
4254 			  y = lang_hooks.decls.omp_clause_dtor (c, ivar);
4255 			  if (y)
4256 			    {
4257 			      gimple_seq tseq = NULL;
4258 
4259 			      dtor = y;
4260 			      gimplify_stmt (&dtor, &tseq);
4261 			      gimple_seq_add_seq (&llist[1], tseq);
4262 			    }
4263 			}
4264 		      break;
4265 		    }
4266 		}
4267 	      if (nx)
4268 		gimplify_and_add (nx, ilist);
4269 	      /* FALLTHRU */
4270 
4271 	    do_dtor:
4272 	      x = lang_hooks.decls.omp_clause_dtor (c, new_var);
4273 	      if (x)
4274 		{
4275 		  gimple_seq tseq = NULL;
4276 
4277 		  dtor = x;
4278 		  gimplify_stmt (&dtor, &tseq);
4279 		  gimple_seq_add_seq (dlist, tseq);
4280 		}
4281 	      break;
4282 
4283 	    case OMP_CLAUSE_LINEAR:
4284 	      if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
4285 		goto do_firstprivate;
4286 	      if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4287 		x = NULL;
4288 	      else
4289 		x = build_outer_var_ref (var, ctx);
4290 	      goto do_private;
4291 
4292 	    case OMP_CLAUSE_FIRSTPRIVATE:
4293 	      if (is_task_ctx (ctx))
4294 		{
4295 		  if (omp_is_reference (var) || is_variable_sized (var))
4296 		    goto do_dtor;
4297 		  else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
4298 									  ctx))
4299 			   || use_pointer_for_field (var, NULL))
4300 		    {
4301 		      x = build_receiver_ref (var, false, ctx);
4302 		      SET_DECL_VALUE_EXPR (new_var, x);
4303 		      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4304 		      goto do_dtor;
4305 		    }
4306 		}
4307 	    do_firstprivate:
4308 	      x = build_outer_var_ref (var, ctx);
4309 	      if (is_simd)
4310 		{
4311 		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4312 		      && gimple_omp_for_combined_into_p (ctx->stmt))
4313 		    {
4314 		      tree t = OMP_CLAUSE_LINEAR_STEP (c);
4315 		      tree stept = TREE_TYPE (t);
4316 		      tree ct = omp_find_clause (clauses,
4317 						 OMP_CLAUSE__LOOPTEMP_);
4318 		      gcc_assert (ct);
4319 		      tree l = OMP_CLAUSE_DECL (ct);
4320 		      tree n1 = fd->loop.n1;
4321 		      tree step = fd->loop.step;
4322 		      tree itype = TREE_TYPE (l);
4323 		      if (POINTER_TYPE_P (itype))
4324 			itype = signed_type_for (itype);
4325 		      l = fold_build2 (MINUS_EXPR, itype, l, n1);
4326 		      if (TYPE_UNSIGNED (itype)
4327 			  && fd->loop.cond_code == GT_EXPR)
4328 			l = fold_build2 (TRUNC_DIV_EXPR, itype,
4329 					 fold_build1 (NEGATE_EXPR, itype, l),
4330 					 fold_build1 (NEGATE_EXPR,
4331 						      itype, step));
4332 		      else
4333 			l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
4334 		      t = fold_build2 (MULT_EXPR, stept,
4335 				       fold_convert (stept, l), t);
4336 
4337 		      if (OMP_CLAUSE_LINEAR_ARRAY (c))
4338 			{
4339 			  x = lang_hooks.decls.omp_clause_linear_ctor
4340 							(c, new_var, x, t);
4341 			  gimplify_and_add (x, ilist);
4342 			  goto do_dtor;
4343 			}
4344 
4345 		      if (POINTER_TYPE_P (TREE_TYPE (x)))
4346 			x = fold_build2 (POINTER_PLUS_EXPR,
4347 					 TREE_TYPE (x), x, t);
4348 		      else
4349 			x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
4350 		    }
4351 
4352 		  if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
4353 		       || TREE_ADDRESSABLE (new_var))
4354 		      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4355 						       ivar, lvar))
4356 		    {
4357 		      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
4358 			{
4359 			  tree iv = create_tmp_var (TREE_TYPE (new_var));
4360 			  x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
4361 			  gimplify_and_add (x, ilist);
4362 			  gimple_stmt_iterator gsi
4363 			    = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4364 			  gassign *g
4365 			    = gimple_build_assign (unshare_expr (lvar), iv);
4366 			  gsi_insert_before_without_update (&gsi, g,
4367 							    GSI_SAME_STMT);
4368 			  tree t = OMP_CLAUSE_LINEAR_STEP (c);
4369 			  enum tree_code code = PLUS_EXPR;
4370 			  if (POINTER_TYPE_P (TREE_TYPE (new_var)))
4371 			    code = POINTER_PLUS_EXPR;
4372 			  g = gimple_build_assign (iv, code, iv, t);
4373 			  gsi_insert_before_without_update (&gsi, g,
4374 							    GSI_SAME_STMT);
4375 			  break;
4376 			}
4377 		      x = lang_hooks.decls.omp_clause_copy_ctor
4378 						(c, unshare_expr (ivar), x);
4379 		      gimplify_and_add (x, &llist[0]);
4380 		      x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4381 		      if (x)
4382 			{
4383 			  gimple_seq tseq = NULL;
4384 
4385 			  dtor = x;
4386 			  gimplify_stmt (&dtor, &tseq);
4387 			  gimple_seq_add_seq (&llist[1], tseq);
4388 			}
4389 		      break;
4390 		    }
4391 		}
4392 	      x = lang_hooks.decls.omp_clause_copy_ctor
4393 						(c, unshare_expr (new_var), x);
4394 	      gimplify_and_add (x, ilist);
4395 	      goto do_dtor;
4396 
4397 	    case OMP_CLAUSE__LOOPTEMP_:
4398 	      gcc_assert (is_taskreg_ctx (ctx));
4399 	      x = build_outer_var_ref (var, ctx);
4400 	      x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4401 	      gimplify_and_add (x, ilist);
4402 	      break;
4403 
4404 	    case OMP_CLAUSE_COPYIN:
4405 	      by_ref = use_pointer_for_field (var, NULL);
4406 	      x = build_receiver_ref (var, by_ref, ctx);
4407 	      x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
4408 	      append_to_statement_list (x, &copyin_seq);
4409 	      copyin_by_ref |= by_ref;
4410 	      break;
4411 
4412 	    case OMP_CLAUSE_REDUCTION:
4413 	      /* OpenACC reductions are initialized using the
4414 		 GOACC_REDUCTION internal function.  */
4415 	      if (is_gimple_omp_oacc (ctx->stmt))
4416 		break;
4417 	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4418 		{
4419 		  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4420 		  gimple *tseq;
4421 		  x = build_outer_var_ref (var, ctx);
4422 
4423 		  if (omp_is_reference (var)
4424 		      && !useless_type_conversion_p (TREE_TYPE (placeholder),
4425 						     TREE_TYPE (x)))
4426 		    x = build_fold_addr_expr_loc (clause_loc, x);
4427 		  SET_DECL_VALUE_EXPR (placeholder, x);
4428 		  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4429 		  tree new_vard = new_var;
4430 		  if (omp_is_reference (var))
4431 		    {
4432 		      gcc_assert (TREE_CODE (new_var) == MEM_REF);
4433 		      new_vard = TREE_OPERAND (new_var, 0);
4434 		      gcc_assert (DECL_P (new_vard));
4435 		    }
4436 		  if (is_simd
4437 		      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4438 						       ivar, lvar))
4439 		    {
4440 		      if (new_vard == new_var)
4441 			{
4442 			  gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
4443 			  SET_DECL_VALUE_EXPR (new_var, ivar);
4444 			}
4445 		      else
4446 			{
4447 			  SET_DECL_VALUE_EXPR (new_vard,
4448 					       build_fold_addr_expr (ivar));
4449 			  DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4450 			}
4451 		      x = lang_hooks.decls.omp_clause_default_ctor
4452 				(c, unshare_expr (ivar),
4453 				 build_outer_var_ref (var, ctx));
4454 		      if (x)
4455 			gimplify_and_add (x, &llist[0]);
4456 		      if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4457 			{
4458 			  tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4459 			  lower_omp (&tseq, ctx);
4460 			  gimple_seq_add_seq (&llist[0], tseq);
4461 			}
4462 		      OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4463 		      tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4464 		      lower_omp (&tseq, ctx);
4465 		      gimple_seq_add_seq (&llist[1], tseq);
4466 		      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4467 		      DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4468 		      if (new_vard == new_var)
4469 			SET_DECL_VALUE_EXPR (new_var, lvar);
4470 		      else
4471 			SET_DECL_VALUE_EXPR (new_vard,
4472 					     build_fold_addr_expr (lvar));
4473 		      x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4474 		      if (x)
4475 			{
4476 			  tseq = NULL;
4477 			  dtor = x;
4478 			  gimplify_stmt (&dtor, &tseq);
4479 			  gimple_seq_add_seq (&llist[1], tseq);
4480 			}
4481 		      break;
4482 		    }
4483 		  /* If this is a reference to a constant size reduction var
4484 		     with a placeholder, we haven't emitted the initializer
4485 		     for it because that is undesirable if SIMD arrays are used.
4486 		     But if they aren't used, we need to emit the deferred
4487 		     initialization now.  */
4488 		  else if (omp_is_reference (var) && is_simd)
4489 		    handle_simd_reference (clause_loc, new_vard, ilist);
4490 		  x = lang_hooks.decls.omp_clause_default_ctor
4491 				(c, unshare_expr (new_var),
4492 				 build_outer_var_ref (var, ctx));
4493 		  if (x)
4494 		    gimplify_and_add (x, ilist);
4495 		  if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4496 		    {
4497 		      tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4498 		      lower_omp (&tseq, ctx);
4499 		      gimple_seq_add_seq (ilist, tseq);
4500 		    }
4501 		  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4502 		  if (is_simd)
4503 		    {
4504 		      tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4505 		      lower_omp (&tseq, ctx);
4506 		      gimple_seq_add_seq (dlist, tseq);
4507 		      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4508 		    }
4509 		  DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4510 		  goto do_dtor;
4511 		}
4512 	      else
4513 		{
4514 		  x = omp_reduction_init (c, TREE_TYPE (new_var));
4515 		  gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
4516 		  enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4517 
4518 		  /* reduction(-:var) sums up the partial results, so it
4519 		     acts identically to reduction(+:var).  */
4520 		  if (code == MINUS_EXPR)
4521 		    code = PLUS_EXPR;
4522 
4523 		  tree new_vard = new_var;
4524 		  if (is_simd && omp_is_reference (var))
4525 		    {
4526 		      gcc_assert (TREE_CODE (new_var) == MEM_REF);
4527 		      new_vard = TREE_OPERAND (new_var, 0);
4528 		      gcc_assert (DECL_P (new_vard));
4529 		    }
4530 		  if (is_simd
4531 		      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4532 						       ivar, lvar))
4533 		    {
4534 		      tree ref = build_outer_var_ref (var, ctx);
4535 
4536 		      gimplify_assign (unshare_expr (ivar), x, &llist[0]);
4537 
4538 		      if (sctx.is_simt)
4539 			{
4540 			  if (!simt_lane)
4541 			    simt_lane = create_tmp_var (unsigned_type_node);
4542 			  x = build_call_expr_internal_loc
4543 			    (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
4544 			     TREE_TYPE (ivar), 2, ivar, simt_lane);
4545 			  x = build2 (code, TREE_TYPE (ivar), ivar, x);
4546 			  gimplify_assign (ivar, x, &llist[2]);
4547 			}
4548 		      x = build2 (code, TREE_TYPE (ref), ref, ivar);
4549 		      ref = build_outer_var_ref (var, ctx);
4550 		      gimplify_assign (ref, x, &llist[1]);
4551 
4552 		      if (new_vard != new_var)
4553 			{
4554 			  SET_DECL_VALUE_EXPR (new_vard,
4555 					       build_fold_addr_expr (lvar));
4556 			  DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4557 			}
4558 		    }
4559 		  else
4560 		    {
4561 		      if (omp_is_reference (var) && is_simd)
4562 			handle_simd_reference (clause_loc, new_vard, ilist);
4563 		      gimplify_assign (new_var, x, ilist);
4564 		      if (is_simd)
4565 			{
4566 			  tree ref = build_outer_var_ref (var, ctx);
4567 
4568 			  x = build2 (code, TREE_TYPE (ref), ref, new_var);
4569 			  ref = build_outer_var_ref (var, ctx);
4570 			  gimplify_assign (ref, x, dlist);
4571 			}
4572 		    }
4573 		}
4574 	      break;
4575 
4576 	    default:
4577 	      gcc_unreachable ();
4578 	    }
4579 	}
4580     }
4581 
4582   if (sctx.max_vf == 1)
4583     sctx.is_simt = false;
4584 
4585   if (sctx.lane || sctx.is_simt)
4586     {
4587       uid = create_tmp_var (ptr_type_node, "simduid");
4588       /* Don't warn about SIMDUID being uninitialized; it always is, since
4589 	 we use it not for its value but only for its DECL_UID.  */
4590       TREE_NO_WARNING (uid) = 1;
4591       c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
4592       OMP_CLAUSE__SIMDUID__DECL (c) = uid;
4593       OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4594       gimple_omp_for_set_clauses (ctx->stmt, c);
4595     }
4596   /* Emit calls denoting privatized variables and initializing a pointer to
4597      the structure that holds private variables as fields, after the ompdevlow pass.  */
4598   if (sctx.is_simt)
4599     {
4600       sctx.simt_eargs[0] = uid;
4601       gimple *g
4602 	= gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
4603       gimple_call_set_lhs (g, uid);
4604       gimple_seq_add_stmt (ilist, g);
4605       sctx.simt_eargs.release ();
4606 
4607       simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
4608       g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
4609       gimple_call_set_lhs (g, simtrec);
4610       gimple_seq_add_stmt (ilist, g);
4611     }
4612   if (sctx.lane)
4613     {
4614       gimple *g
4615 	= gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 1, uid);
4616       gimple_call_set_lhs (g, sctx.lane);
4617       gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4618       gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
4619       g = gimple_build_assign (sctx.lane, INTEGER_CST,
4620 			       build_int_cst (unsigned_type_node, 0));
4621       gimple_seq_add_stmt (ilist, g);
4622       /* Emit reductions across SIMT lanes in log_2(simt_vf) steps.  */
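      /* Editorial sketch of the loop built below:

	     simt_lane = 1;
	     do
	       {
		 <llist[2]: combine IVAR with
			    GOMP_SIMT_XCHG_BFLY (ivar, simt_lane)>
		 simt_lane <<= 1;
	       }
	     while (simt_lane < simt_vf);

	 i.e. a butterfly reduction that doubles the exchange distance on
	 each step.  */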
4623       if (llist[2])
4624 	{
4625 	  tree simt_vf = create_tmp_var (unsigned_type_node);
4626 	  g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
4627 	  gimple_call_set_lhs (g, simt_vf);
4628 	  gimple_seq_add_stmt (dlist, g);
4629 
4630 	  tree t = build_int_cst (unsigned_type_node, 1);
4631 	  g = gimple_build_assign (simt_lane, INTEGER_CST, t);
4632 	  gimple_seq_add_stmt (dlist, g);
4633 
4634 	  t = build_int_cst (unsigned_type_node, 0);
4635 	  g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4636 	  gimple_seq_add_stmt (dlist, g);
4637 
4638 	  tree body = create_artificial_label (UNKNOWN_LOCATION);
4639 	  tree header = create_artificial_label (UNKNOWN_LOCATION);
4640 	  tree end = create_artificial_label (UNKNOWN_LOCATION);
4641 	  gimple_seq_add_stmt (dlist, gimple_build_goto (header));
4642 	  gimple_seq_add_stmt (dlist, gimple_build_label (body));
4643 
4644 	  gimple_seq_add_seq (dlist, llist[2]);
4645 
4646 	  g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
4647 	  gimple_seq_add_stmt (dlist, g);
4648 
4649 	  gimple_seq_add_stmt (dlist, gimple_build_label (header));
4650 	  g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
4651 	  gimple_seq_add_stmt (dlist, g);
4652 
4653 	  gimple_seq_add_stmt (dlist, gimple_build_label (end));
4654 	}
4655       for (int i = 0; i < 2; i++)
4656 	if (llist[i])
4657 	  {
4658 	    tree vf = create_tmp_var (unsigned_type_node);
4659 	    g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
4660 	    gimple_call_set_lhs (g, vf);
4661 	    gimple_seq *seq = i == 0 ? ilist : dlist;
4662 	    gimple_seq_add_stmt (seq, g);
4663 	    tree t = build_int_cst (unsigned_type_node, 0);
4664 	    g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4665 	    gimple_seq_add_stmt (seq, g);
4666 	    tree body = create_artificial_label (UNKNOWN_LOCATION);
4667 	    tree header = create_artificial_label (UNKNOWN_LOCATION);
4668 	    tree end = create_artificial_label (UNKNOWN_LOCATION);
4669 	    gimple_seq_add_stmt (seq, gimple_build_goto (header));
4670 	    gimple_seq_add_stmt (seq, gimple_build_label (body));
4671 	    gimple_seq_add_seq (seq, llist[i]);
4672 	    t = build_int_cst (unsigned_type_node, 1);
4673 	    g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
4674 	    gimple_seq_add_stmt (seq, g);
4675 	    gimple_seq_add_stmt (seq, gimple_build_label (header));
4676 	    g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
4677 	    gimple_seq_add_stmt (seq, g);
4678 	    gimple_seq_add_stmt (seq, gimple_build_label (end));
4679 	  }
4680     }
4681   if (sctx.is_simt)
4682     {
4683       gimple_seq_add_seq (dlist, sctx.simt_dlist);
4684       gimple *g
4685 	= gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
4686       gimple_seq_add_stmt (dlist, g);
4687     }
4688 
4689   /* The copyin sequence is not to be executed by the main thread, since
4690      that would result in self-copies.  Perhaps not visible to scalars,
4691      but it certainly is to C++ operator=.  */
4692   if (copyin_seq)
4693     {
4694       x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
4695 			   0);
4696       x = build2 (NE_EXPR, boolean_type_node, x,
4697 		  build_int_cst (TREE_TYPE (x), 0));
4698       x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
4699       gimplify_and_add (x, ilist);
4700     }
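
  /* Editorial sketch: the guard built above amounts to

       if (omp_get_thread_num () != 0)
	 <copyin_seq>

     so only the non-master threads copy the threadprivate values in.  */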
4701 
4702   /* If any copyin variable is passed by reference, we must ensure that
4703      the master thread doesn't modify it before it is copied over in all
4704      threads.  Similarly, for variables in both firstprivate and
4705      lastprivate clauses, we need to ensure that the lastprivate copying
4706      happens after the firstprivate copying in all threads.  And similarly
4707      for UDRs, if the initializer expression refers to omp_orig.  */
4708   if (copyin_by_ref || lastprivate_firstprivate || reduction_omp_orig_ref)
4709     {
4710       /* Don't add any barrier for #pragma omp simd or
4711 	 #pragma omp distribute.  */
4712       if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
4713 	  || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR)
4714 	gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
4715     }
4716 
4717   /* If max_vf is non-zero, then we can use only a vectorization factor
4718      up to the max_vf we chose.  So stick it into the safelen clause.  */
4719   if (sctx.max_vf)
4720     {
4721       tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4722 				OMP_CLAUSE_SAFELEN);
4723       if (c == NULL_TREE
4724 	  || (TREE_CODE (OMP_CLAUSE_SAFELEN_EXPR (c)) == INTEGER_CST
4725 	      && compare_tree_int (OMP_CLAUSE_SAFELEN_EXPR (c),
4726 				   sctx.max_vf) == 1))
4727 	{
4728 	  c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
4729 	  OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
4730 						       sctx.max_vf);
4731 	  OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4732 	  gimple_omp_for_set_clauses (ctx->stmt, c);
4733 	}
4734     }
4735 }
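
/* Editorial sketch: the safelen adjustment at the end of
   lower_rec_input_clauses means that if, say, max_vf ended up as 16 and
   the user wrote no (or a larger) safelen, the loop behaves as if it were

     #pragma omp simd safelen(16)

   so later passes never assume a vectorization factor larger than what
   the privatized SIMD arrays were sized for.  */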
4736 
4737 
4738 /* Generate code to implement the LASTPRIVATE clauses.  This is used for
4739    both parallel and workshare constructs.  PREDICATE may be NULL if it's
4740    always true.   */
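
/* Editorial sketch: for

     #pragma omp for lastprivate (x)

   the code below guards the copy-back with PREDICATE so that only the
   thread that executed the sequentially last iteration performs

     x_orig = x_priv;			<- appended to STMT_LIST

   where X_ORIG/X_PRIV stand for the outer reference and the privatized
   copy (illustrative names).  */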
4741 
4742 static void
4743 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *stmt_list,
4744 			   omp_context *ctx)
4745 {
4746   tree x, c, label = NULL, orig_clauses = clauses;
4747   bool par_clauses = false;
4748   tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
4749 
4750   /* Early exit if there are no lastprivate or linear clauses.  */
4751   for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
4752     if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
4753 	|| (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
4754 	    && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
4755       break;
4756   if (clauses == NULL)
4757     {
4758       /* If this was a workshare clause, see if it had been combined
4759 	 with its parallel.  In that case, look for the clauses on the
4760 	 parallel statement itself.  */
4761       if (is_parallel_ctx (ctx))
4762 	return;
4763 
4764       ctx = ctx->outer;
4765       if (ctx == NULL || !is_parallel_ctx (ctx))
4766 	return;
4767 
4768       clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4769 				 OMP_CLAUSE_LASTPRIVATE);
4770       if (clauses == NULL)
4771 	return;
4772       par_clauses = true;
4773     }
4774 
4775   bool maybe_simt = false;
4776   if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4777       && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
4778     {
4779       maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
4780       simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
4781       if (simduid)
4782 	simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
4783     }
4784 
4785   if (predicate)
4786     {
4787       gcond *stmt;
4788       tree label_true, arm1, arm2;
4789       enum tree_code pred_code = TREE_CODE (predicate);
4790 
4791       label = create_artificial_label (UNKNOWN_LOCATION);
4792       label_true = create_artificial_label (UNKNOWN_LOCATION);
4793       if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
4794 	{
4795 	  arm1 = TREE_OPERAND (predicate, 0);
4796 	  arm2 = TREE_OPERAND (predicate, 1);
4797 	  gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4798 	  gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
4799 	}
4800       else
4801 	{
4802 	  arm1 = predicate;
4803 	  gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4804 	  arm2 = boolean_false_node;
4805 	  pred_code = NE_EXPR;
4806 	}
4807       if (maybe_simt)
4808 	{
4809 	  c = build2 (pred_code, boolean_type_node, arm1, arm2);
4810 	  c = fold_convert (integer_type_node, c);
4811 	  simtcond = create_tmp_var (integer_type_node);
4812 	  gimplify_assign (simtcond, c, stmt_list);
4813 	  gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
4814 						 1, simtcond);
4815 	  c = create_tmp_var (integer_type_node);
4816 	  gimple_call_set_lhs (g, c);
4817 	  gimple_seq_add_stmt (stmt_list, g);
4818 	  stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
4819 				    label_true, label);
4820 	}
4821       else
4822 	stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
4823       gimple_seq_add_stmt (stmt_list, stmt);
4824       gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
4825     }
4826 
4827   for (c = clauses; c ;)
4828     {
4829       tree var, new_var;
4830       location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4831 
4832       if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4833 	  || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4834 	      && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
4835 	{
4836 	  var = OMP_CLAUSE_DECL (c);
4837 	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4838 	      && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
4839 	      && is_taskloop_ctx (ctx))
4840 	    {
4841 	      gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
4842 	      new_var = lookup_decl (var, ctx->outer);
4843 	    }
4844 	  else
4845 	    {
4846 	      new_var = lookup_decl (var, ctx);
4847 	      /* Avoid uninitialized warnings for lastprivate and
4848 		 for linear iterators.  */
4849 	      if (predicate
4850 		  && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4851 		      || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
4852 		TREE_NO_WARNING (new_var) = 1;
4853 	    }
4854 
4855 	  if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
4856 	    {
4857 	      tree val = DECL_VALUE_EXPR (new_var);
4858 	      if (TREE_CODE (val) == ARRAY_REF
4859 		  && VAR_P (TREE_OPERAND (val, 0))
4860 		  && lookup_attribute ("omp simd array",
4861 				       DECL_ATTRIBUTES (TREE_OPERAND (val,
4862 								      0))))
4863 		{
4864 		  if (lastlane == NULL)
4865 		    {
4866 		      lastlane = create_tmp_var (unsigned_type_node);
4867 		      gcall *g
4868 			= gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
4869 						      2, simduid,
4870 						      TREE_OPERAND (val, 1));
4871 		      gimple_call_set_lhs (g, lastlane);
4872 		      gimple_seq_add_stmt (stmt_list, g);
4873 		    }
4874 		  new_var = build4 (ARRAY_REF, TREE_TYPE (val),
4875 				    TREE_OPERAND (val, 0), lastlane,
4876 				    NULL_TREE, NULL_TREE);
4877 		}
4878 	    }
4879 	  else if (maybe_simt)
4880 	    {
4881 	      tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
4882 			  ? DECL_VALUE_EXPR (new_var)
4883 			  : new_var);
4884 	      if (simtlast == NULL)
4885 		{
4886 		  simtlast = create_tmp_var (unsigned_type_node);
4887 		  gcall *g = gimple_build_call_internal
4888 		    (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
4889 		  gimple_call_set_lhs (g, simtlast);
4890 		  gimple_seq_add_stmt (stmt_list, g);
4891 		}
4892 	      x = build_call_expr_internal_loc
4893 		(UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
4894 		 TREE_TYPE (val), 2, val, simtlast);
4895 	      new_var = unshare_expr (new_var);
4896 	      gimplify_assign (new_var, x, stmt_list);
4897 	      new_var = unshare_expr (new_var);
4898 	    }
4899 
4900 	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4901 	      && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
4902 	    {
4903 	      lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
4904 	      gimple_seq_add_seq (stmt_list,
4905 				  OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
4906 	      OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
4907 	    }
4908 	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4909 		   && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
4910 	    {
4911 	      lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
4912 	      gimple_seq_add_seq (stmt_list,
4913 				  OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
4914 	      OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
4915 	    }
4916 
4917 	  x = NULL_TREE;
4918 	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4919 	      && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
4920 	    {
4921 	      gcc_checking_assert (is_taskloop_ctx (ctx));
4922 	      tree ovar = maybe_lookup_decl_in_outer_ctx (var,
4923 							  ctx->outer->outer);
4924 	      if (is_global_var (ovar))
4925 		x = ovar;
4926 	    }
4927 	  if (!x)
4928 	    x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
4929 	  if (omp_is_reference (var))
4930 	    new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4931 	  x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
4932 	  gimplify_and_add (x, stmt_list);
4933 	}
4934       c = OMP_CLAUSE_CHAIN (c);
4935       if (c == NULL && !par_clauses)
4936 	{
4937 	  /* If this was a workshare clause, see if it had been combined
4938 	     with its parallel.  In that case, continue looking for the
4939 	     clauses also on the parallel statement itself.  */
4940 	  if (is_parallel_ctx (ctx))
4941 	    break;
4942 
4943 	  ctx = ctx->outer;
4944 	  if (ctx == NULL || !is_parallel_ctx (ctx))
4945 	    break;
4946 
4947 	  c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4948 			       OMP_CLAUSE_LASTPRIVATE);
4949 	  par_clauses = true;
4950 	}
4951     }
4952 
4953   if (label)
4954     gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
4955 }
4956 
4957 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
4958    (which might be a placeholder).  INNER is true if this is an inner
4959    axis of a multi-axis loop.  FORK and JOIN are (optional) fork and
4960    join markers.  Generate the before-loop forking sequence in
4961    FORK_SEQ and the after-loop joining sequence in JOIN_SEQ.  The
4962    general form of these sequences is
4963 
4964      GOACC_REDUCTION_SETUP
4965      GOACC_FORK
4966      GOACC_REDUCTION_INIT
4967      ...
4968      GOACC_REDUCTION_FINI
4969      GOACC_JOIN
4970      GOACC_REDUCTION_TEARDOWN.  */
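
/* For example (a sketch only; names are illustrative, not generated
   verbatim), for

     #pragma acc loop gang reduction(+:sum)

   the calls built below look roughly like

     v1 = GOACC_REDUCTION (SETUP, ref_to_res, sum, level, +, offset);
     GOACC_FORK
     v2 = GOACC_REDUCTION (INIT, ref_to_res, v1, level, +, offset);
     ...
     v3 = GOACC_REDUCTION (FINI, ref_to_res, v2, level, +, offset);
     GOACC_JOIN
     sum = GOACC_REDUCTION (TEARDOWN, ref_to_res, v3, level, +, offset);  */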
4971 
4972 static void
4973 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
4974 		       gcall *fork, gcall *join, gimple_seq *fork_seq,
4975 		       gimple_seq *join_seq, omp_context *ctx)
4976 {
4977   gimple_seq before_fork = NULL;
4978   gimple_seq after_fork = NULL;
4979   gimple_seq before_join = NULL;
4980   gimple_seq after_join = NULL;
4981   tree init_code = NULL_TREE, fini_code = NULL_TREE,
4982     setup_code = NULL_TREE, teardown_code = NULL_TREE;
4983   unsigned offset = 0;
4984 
4985   for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
4986     if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4987       {
4988 	tree orig = OMP_CLAUSE_DECL (c);
4989 	tree var = maybe_lookup_decl (orig, ctx);
4990 	tree ref_to_res = NULL_TREE;
4991 	tree incoming, outgoing, v1, v2, v3;
4992 	bool is_private = false;
4993 
4994 	enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
4995 	if (rcode == MINUS_EXPR)
4996 	  rcode = PLUS_EXPR;
4997 	else if (rcode == TRUTH_ANDIF_EXPR)
4998 	  rcode = BIT_AND_EXPR;
4999 	else if (rcode == TRUTH_ORIF_EXPR)
5000 	  rcode = BIT_IOR_EXPR;
5001 	tree op = build_int_cst (unsigned_type_node, rcode);
5002 
5003 	if (!var)
5004 	  var = orig;
5005 
5006 	incoming = outgoing = var;
5007 
5008 	if (!inner)
5009 	  {
5010 	    /* See if an outer construct also reduces this variable.  */
5011 	    omp_context *outer = ctx;
5012 
5013 	    while (omp_context *probe = outer->outer)
5014 	      {
5015 		enum gimple_code type = gimple_code (probe->stmt);
5016 		tree cls;
5017 
5018 		switch (type)
5019 		  {
5020 		  case GIMPLE_OMP_FOR:
5021 		    cls = gimple_omp_for_clauses (probe->stmt);
5022 		    break;
5023 
5024 		  case GIMPLE_OMP_TARGET:
5025 		    if (gimple_omp_target_kind (probe->stmt)
5026 			!= GF_OMP_TARGET_KIND_OACC_PARALLEL)
5027 		      goto do_lookup;
5028 
5029 		    cls = gimple_omp_target_clauses (probe->stmt);
5030 		    break;
5031 
5032 		  default:
5033 		    goto do_lookup;
5034 		  }
5035 
5036 		outer = probe;
5037 		for (; cls;  cls = OMP_CLAUSE_CHAIN (cls))
5038 		  if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
5039 		      && orig == OMP_CLAUSE_DECL (cls))
5040 		    {
5041 		      incoming = outgoing = lookup_decl (orig, probe);
5042 		      goto has_outer_reduction;
5043 		    }
5044 		  else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
5045 			    || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
5046 			   && orig == OMP_CLAUSE_DECL (cls))
5047 		    {
5048 		      is_private = true;
5049 		      goto do_lookup;
5050 		    }
5051 	      }
5052 
5053 	  do_lookup:
5054 	    /* This is the outermost construct with this reduction;
5055 	       see if there's a mapping for it.  */
5056 	    if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
5057 		&& maybe_lookup_field (orig, outer) && !is_private)
5058 	      {
5059 		ref_to_res = build_receiver_ref (orig, false, outer);
5060 		if (omp_is_reference (orig))
5061 		  ref_to_res = build_simple_mem_ref (ref_to_res);
5062 
5063 		tree type = TREE_TYPE (var);
5064 		if (POINTER_TYPE_P (type))
5065 		  type = TREE_TYPE (type);
5066 
5067 		outgoing = var;
5068 		incoming = omp_reduction_init_op (loc, rcode, type);
5069 	      }
5070 	    else
5071 	      {
5072 		/* Look in enclosing contexts for the reduction var;
5073 		   use the original if no mapping is found.  */
5074 		tree t = NULL_TREE;
5075 		omp_context *c = ctx->outer;
5076 		while (c && !t)
5077 		  {
5078 		    t = maybe_lookup_decl (orig, c);
5079 		    c = c->outer;
5080 		  }
5081 		incoming = outgoing = (t ? t : orig);
5082 	      }
5083 
5084 	  has_outer_reduction:;
5085 	  }
5086 
5087 	if (!ref_to_res)
5088 	  ref_to_res = integer_zero_node;
5089 
5090 	if (omp_is_reference (orig))
5091 	  {
5092 	    tree type = TREE_TYPE (var);
5093 	    const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
5094 
5095 	    if (!inner)
5096 	      {
5097 		tree x = create_tmp_var (TREE_TYPE (type), id);
5098 		gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
5099 	      }
5100 
5101 	    v1 = create_tmp_var (type, id);
5102 	    v2 = create_tmp_var (type, id);
5103 	    v3 = create_tmp_var (type, id);
5104 
5105 	    gimplify_assign (v1, var, fork_seq);
5106 	    gimplify_assign (v2, var, fork_seq);
5107 	    gimplify_assign (v3, var, fork_seq);
5108 
5109 	    var = build_simple_mem_ref (var);
5110 	    v1 = build_simple_mem_ref (v1);
5111 	    v2 = build_simple_mem_ref (v2);
5112 	    v3 = build_simple_mem_ref (v3);
5113 	    outgoing = build_simple_mem_ref (outgoing);
5114 
5115 	    if (!TREE_CONSTANT (incoming))
5116 	      incoming = build_simple_mem_ref (incoming);
5117 	  }
5118 	else
5119 	  v1 = v2 = v3 = var;
5120 
5121 	/* Determine the position in the reduction buffer, which may be
5122 	   used by the target.  */
5123 	enum machine_mode mode = TYPE_MODE (TREE_TYPE (var));
5124 	unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
5125 	offset = (offset + align - 1) & ~(align - 1);
5126 	tree off = build_int_cst (sizetype, offset);
5127 	offset += GET_MODE_SIZE (mode);
5128 
5129 	if (!init_code)
5130 	  {
5131 	    init_code = build_int_cst (integer_type_node,
5132 				       IFN_GOACC_REDUCTION_INIT);
5133 	    fini_code = build_int_cst (integer_type_node,
5134 				       IFN_GOACC_REDUCTION_FINI);
5135 	    setup_code = build_int_cst (integer_type_node,
5136 					IFN_GOACC_REDUCTION_SETUP);
5137 	    teardown_code = build_int_cst (integer_type_node,
5138 					   IFN_GOACC_REDUCTION_TEARDOWN);
5139 	  }
5140 
5141 	tree setup_call
5142 	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5143 					  TREE_TYPE (var), 6, setup_code,
5144 					  unshare_expr (ref_to_res),
5145 					  incoming, level, op, off);
5146 	tree init_call
5147 	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5148 					  TREE_TYPE (var), 6, init_code,
5149 					  unshare_expr (ref_to_res),
5150 					  v1, level, op, off);
5151 	tree fini_call
5152 	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5153 					  TREE_TYPE (var), 6, fini_code,
5154 					  unshare_expr (ref_to_res),
5155 					  v2, level, op, off);
5156 	tree teardown_call
5157 	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5158 					  TREE_TYPE (var), 6, teardown_code,
5159 					  ref_to_res, v3, level, op, off);
5160 
5161 	gimplify_assign (v1, setup_call, &before_fork);
5162 	gimplify_assign (v2, init_call, &after_fork);
5163 	gimplify_assign (v3, fini_call, &before_join);
5164 	gimplify_assign (outgoing, teardown_call, &after_join);
5165       }
5166 
5167   /* Now stitch things together.  */
5168   gimple_seq_add_seq (fork_seq, before_fork);
5169   if (fork)
5170     gimple_seq_add_stmt (fork_seq, fork);
5171   gimple_seq_add_seq (fork_seq, after_fork);
5172 
5173   gimple_seq_add_seq (join_seq, before_join);
5174   if (join)
5175     gimple_seq_add_stmt (join_seq, join);
5176   gimple_seq_add_seq (join_seq, after_join);
5177 }
5178 
5179 /* Generate code to implement the REDUCTION clauses.  */
5180 
5181 static void
5182 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
5183 {
5184   gimple_seq sub_seq = NULL;
5185   gimple *stmt;
5186   tree x, c;
5187   int count = 0;
5188 
5189   /* OpenACC loop reductions are handled elsewhere.  */
5190   if (is_gimple_omp_oacc (ctx->stmt))
5191     return;
5192 
5193   /* SIMD reductions are handled in lower_rec_input_clauses.  */
5194   if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
5195       && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
5196     return;
5197 
5198   /* First see if there is exactly one reduction clause.  Use OMP_ATOMIC
5199      update in that case, otherwise use a lock.  */
5200   for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
5201     if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
5202       {
5203 	if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5204 	    || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5205 	  {
5206 	    /* Never use OMP_ATOMIC for array reductions or UDRs.  */
5207 	    count = -1;
5208 	    break;
5209 	  }
5210 	count++;
5211       }
5212 
5213   if (count == 0)
5214     return;
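
  /* E.g. (a sketch; source-level forms, not generated verbatim): for a
     single scalar reduction(+:s) the merge below is an atomic update,

       #pragma omp atomic
       *&s_outer += s_private;

     while with two or more clauses the merges are serialized with

       GOMP_atomic_start ();
       s_outer += s_private;  t_outer *= t_private;
       GOMP_atomic_end ();  */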
5215 
5216   for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5217     {
5218       tree var, ref, new_var, orig_var;
5219       enum tree_code code;
5220       location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5221 
5222       if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5223 	continue;
5224 
5225       enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
5226       orig_var = var = OMP_CLAUSE_DECL (c);
5227       if (TREE_CODE (var) == MEM_REF)
5228 	{
5229 	  var = TREE_OPERAND (var, 0);
5230 	  if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5231 	    var = TREE_OPERAND (var, 0);
5232 	  if (TREE_CODE (var) == ADDR_EXPR)
5233 	    var = TREE_OPERAND (var, 0);
5234 	  else
5235 	    {
5236 	      /* If this is a pointer- or reference-based array
5237 		 section, the var could be private in the outer
5238 		 context, e.g. on an orphaned loop construct.  Pretend
5239 		 this is the private variable's outer reference.  */
5240 	      ccode = OMP_CLAUSE_PRIVATE;
5241 	      if (TREE_CODE (var) == INDIRECT_REF)
5242 		var = TREE_OPERAND (var, 0);
5243 	    }
5244 	  orig_var = var;
5245 	  if (is_variable_sized (var))
5246 	    {
5247 	      gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5248 	      var = DECL_VALUE_EXPR (var);
5249 	      gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5250 	      var = TREE_OPERAND (var, 0);
5251 	      gcc_assert (DECL_P (var));
5252 	    }
5253 	}
5254       new_var = lookup_decl (var, ctx);
5255       if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
5256 	new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5257       ref = build_outer_var_ref (var, ctx, ccode);
5258       code = OMP_CLAUSE_REDUCTION_CODE (c);
5259 
5260       /* reduction(-:var) sums up the partial results, so it acts
5261 	 identically to reduction(+:var).  */
5262       if (code == MINUS_EXPR)
5263         code = PLUS_EXPR;
5264 
5265       if (count == 1)
5266 	{
5267 	  tree addr = build_fold_addr_expr_loc (clause_loc, ref);
5268 
5269 	  addr = save_expr (addr);
5270 	  ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
5271 	  x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
5272 	  x = build2 (OMP_ATOMIC, void_type_node, addr, x);
5273 	  gimplify_and_add (x, stmt_seqp);
5274 	  return;
5275 	}
5276       else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5277 	{
5278 	  tree d = OMP_CLAUSE_DECL (c);
5279 	  tree type = TREE_TYPE (d);
5280 	  tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5281 	  tree i = create_tmp_var (TREE_TYPE (v), NULL);
5282 	  tree ptype = build_pointer_type (TREE_TYPE (type));
5283 	  tree bias = TREE_OPERAND (d, 1);
5284 	  d = TREE_OPERAND (d, 0);
5285 	  if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5286 	    {
5287 	      tree b = TREE_OPERAND (d, 1);
5288 	      b = maybe_lookup_decl (b, ctx);
5289 	      if (b == NULL)
5290 		{
5291 		  b = TREE_OPERAND (d, 1);
5292 		  b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5293 		}
5294 	      if (integer_zerop (bias))
5295 		bias = b;
5296 	      else
5297 		{
5298 		  bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
5299 		  bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5300 					  TREE_TYPE (b), b, bias);
5301 		}
5302 	      d = TREE_OPERAND (d, 0);
5303 	    }
5304 	  /* For REF, build_outer_var_ref has already done the
5305 	     dereference, so only NEW_VAR needs one.  */
5306 	  if (TREE_CODE (d) == INDIRECT_REF)
5307 	    {
5308 	      new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5309 	      gcc_assert (omp_is_reference (var) && var == orig_var);
5310 	    }
5311 	  else if (TREE_CODE (d) == ADDR_EXPR)
5312 	    {
5313 	      if (orig_var == var)
5314 		{
5315 		  new_var = build_fold_addr_expr (new_var);
5316 		  ref = build_fold_addr_expr (ref);
5317 		}
5318 	    }
5319 	  else
5320 	    {
5321 	      gcc_assert (orig_var == var);
5322 	      if (omp_is_reference (var))
5323 		ref = build_fold_addr_expr (ref);
5324 	    }
5325 	  if (DECL_P (v))
5326 	    {
5327 	      tree t = maybe_lookup_decl (v, ctx);
5328 	      if (t)
5329 		v = t;
5330 	      else
5331 		v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5332 	      gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
5333 	    }
5334 	  if (!integer_zerop (bias))
5335 	    {
5336 	      bias = fold_convert_loc (clause_loc, sizetype, bias);
5337 	      new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5338 					 TREE_TYPE (new_var), new_var,
5339 					 unshare_expr (bias));
5340 	      ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5341 					 TREE_TYPE (ref), ref, bias);
5342 	    }
5343 	  new_var = fold_convert_loc (clause_loc, ptype, new_var);
5344 	  ref = fold_convert_loc (clause_loc, ptype, ref);
5345 	  tree m = create_tmp_var (ptype, NULL);
5346 	  gimplify_assign (m, new_var, stmt_seqp);
5347 	  new_var = m;
5348 	  m = create_tmp_var (ptype, NULL);
5349 	  gimplify_assign (m, ref, stmt_seqp);
5350 	  ref = m;
5351 	  gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
5352 	  tree body = create_artificial_label (UNKNOWN_LOCATION);
5353 	  tree end = create_artificial_label (UNKNOWN_LOCATION);
5354 	  gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
5355 	  tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
5356 	  tree out = build_simple_mem_ref_loc (clause_loc, ref);
5357 	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5358 	    {
5359 	      tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5360 	      tree decl_placeholder
5361 		= OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5362 	      SET_DECL_VALUE_EXPR (placeholder, out);
5363 	      DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5364 	      SET_DECL_VALUE_EXPR (decl_placeholder, priv);
5365 	      DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5366 	      lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5367 	      gimple_seq_add_seq (&sub_seq,
5368 				  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5369 	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5370 	      OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5371 	      OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
5372 	    }
5373 	  else
5374 	    {
5375 	      x = build2 (code, TREE_TYPE (out), out, priv);
5376 	      out = unshare_expr (out);
5377 	      gimplify_assign (out, x, &sub_seq);
5378 	    }
5379 	  gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
5380 					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
5381 	  gimple_seq_add_stmt (&sub_seq, g);
5382 	  g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
5383 				   TYPE_SIZE_UNIT (TREE_TYPE (type)));
5384 	  gimple_seq_add_stmt (&sub_seq, g);
5385 	  g = gimple_build_assign (i, PLUS_EXPR, i,
5386 				   build_int_cst (TREE_TYPE (i), 1));
5387 	  gimple_seq_add_stmt (&sub_seq, g);
5388 	  g = gimple_build_cond (LE_EXPR, i, v, body, end);
5389 	  gimple_seq_add_stmt (&sub_seq, g);
5390 	  gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
5391 	}
5392       else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5393 	{
5394 	  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5395 
5396 	  if (omp_is_reference (var)
5397 	      && !useless_type_conversion_p (TREE_TYPE (placeholder),
5398 					     TREE_TYPE (ref)))
5399 	    ref = build_fold_addr_expr_loc (clause_loc, ref);
5400 	  SET_DECL_VALUE_EXPR (placeholder, ref);
5401 	  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5402 	  lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5403 	  gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5404 	  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5405 	  OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5406 	}
5407       else
5408 	{
5409 	  x = build2 (code, TREE_TYPE (ref), ref, new_var);
5410 	  ref = build_outer_var_ref (var, ctx);
5411 	  gimplify_assign (ref, x, &sub_seq);
5412 	}
5413     }
5414 
5415   stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
5416 			    0);
5417   gimple_seq_add_stmt (stmt_seqp, stmt);
5418 
5419   gimple_seq_add_seq (stmt_seqp, sub_seq);
5420 
5421   stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
5422 			    0);
5423   gimple_seq_add_stmt (stmt_seqp, stmt);
5424 }
5425 
5426 
5427 /* Generate code to implement the COPYPRIVATE clauses.  */
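
/* For example (a sketch; field and variable names are illustrative), for
   #pragma omp single copyprivate(a) the sender side (SLIST) gets

     .omp_copy_o.a = a;		(or = &a, when passed by reference)

   and the receiver side (RLIST) gets

     a = .omp_copy_i->a;  */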
5428 
5429 static void
5430 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
5431 			    omp_context *ctx)
5432 {
5433   tree c;
5434 
5435   for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5436     {
5437       tree var, new_var, ref, x;
5438       bool by_ref;
5439       location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5440 
5441       if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
5442 	continue;
5443 
5444       var = OMP_CLAUSE_DECL (c);
5445       by_ref = use_pointer_for_field (var, NULL);
5446 
5447       ref = build_sender_ref (var, ctx);
5448       x = new_var = lookup_decl_in_outer_ctx (var, ctx);
5449       if (by_ref)
5450 	{
5451 	  x = build_fold_addr_expr_loc (clause_loc, new_var);
5452 	  x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
5453 	}
5454       gimplify_assign (ref, x, slist);
5455 
5456       ref = build_receiver_ref (var, false, ctx);
5457       if (by_ref)
5458 	{
5459 	  ref = fold_convert_loc (clause_loc,
5460 				  build_pointer_type (TREE_TYPE (new_var)),
5461 				  ref);
5462 	  ref = build_fold_indirect_ref_loc (clause_loc, ref);
5463 	}
5464       if (omp_is_reference (var))
5465 	{
5466 	  ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
5467 	  ref = build_simple_mem_ref_loc (clause_loc, ref);
5468 	  new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5469 	}
5470       x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
5471       gimplify_and_add (x, rlist);
5472     }
5473 }
5474 
5475 
5476 /* Generate code to implement the clauses FIRSTPRIVATE, COPYIN, LASTPRIVATE,
5477    and REDUCTION from the sender (aka parent) side.  */
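
/* Sketch (illustrative names): for firstprivate(x) the parent stores the
   incoming value into the marshalling record,

     .omp_data_o.x = x;		(or = &x, when passed by reference)

   and for a non-reference lastprivate(x) reads the result back out
   afterwards,

     x = .omp_data_o.x;  */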
5478 
5479 static void
5480 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
5481     		    omp_context *ctx)
5482 {
5483   tree c, t;
5484   int ignored_looptemp = 0;
5485   bool is_taskloop = false;
5486 
5487   /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
5488      by GOMP_taskloop.  */
5489   if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
5490     {
5491       ignored_looptemp = 2;
5492       is_taskloop = true;
5493     }
5494 
5495   for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5496     {
5497       tree val, ref, x, var;
5498       bool by_ref, do_in = false, do_out = false;
5499       location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5500 
5501       switch (OMP_CLAUSE_CODE (c))
5502 	{
5503 	case OMP_CLAUSE_PRIVATE:
5504 	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5505 	    break;
5506 	  continue;
5507 	case OMP_CLAUSE_FIRSTPRIVATE:
5508 	case OMP_CLAUSE_COPYIN:
5509 	case OMP_CLAUSE_LASTPRIVATE:
5510 	case OMP_CLAUSE_REDUCTION:
5511 	  break;
5512 	case OMP_CLAUSE_SHARED:
5513 	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5514 	    break;
5515 	  continue;
5516 	case OMP_CLAUSE__LOOPTEMP_:
5517 	  if (ignored_looptemp)
5518 	    {
5519 	      ignored_looptemp--;
5520 	      continue;
5521 	    }
5522 	  break;
5523 	default:
5524 	  continue;
5525 	}
5526 
5527       val = OMP_CLAUSE_DECL (c);
5528       if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5529 	  && TREE_CODE (val) == MEM_REF)
5530 	{
5531 	  val = TREE_OPERAND (val, 0);
5532 	  if (TREE_CODE (val) == POINTER_PLUS_EXPR)
5533 	    val = TREE_OPERAND (val, 0);
5534 	  if (TREE_CODE (val) == INDIRECT_REF
5535 	      || TREE_CODE (val) == ADDR_EXPR)
5536 	    val = TREE_OPERAND (val, 0);
5537 	  if (is_variable_sized (val))
5538 	    continue;
5539 	}
5540 
5541       /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
5542 	 outer taskloop region.  */
5543       omp_context *ctx_for_o = ctx;
5544       if (is_taskloop
5545 	  && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
5546 	  && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5547 	ctx_for_o = ctx->outer;
5548 
5549       var = lookup_decl_in_outer_ctx (val, ctx_for_o);
5550 
5551       if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
5552 	  && is_global_var (var))
5553 	continue;
5554 
5555       t = omp_member_access_dummy_var (var);
5556       if (t)
5557 	{
5558 	  var = DECL_VALUE_EXPR (var);
5559 	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
5560 	  if (o != t)
5561 	    var = unshare_and_remap (var, t, o);
5562 	  else
5563 	    var = unshare_expr (var);
5564 	}
5565 
5566       if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
5567 	{
5568 	  /* Handle taskloop firstprivate/lastprivate, where the
5569 	     lastprivate on GIMPLE_OMP_TASK is represented as
5570 	     OMP_CLAUSE_SHARED_FIRSTPRIVATE.  */
5571 	  tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
5572 	  x = omp_build_component_ref (ctx->sender_decl, f);
5573 	  if (use_pointer_for_field (val, ctx))
5574 	    var = build_fold_addr_expr (var);
5575 	  gimplify_assign (x, var, ilist);
5576 	  DECL_ABSTRACT_ORIGIN (f) = NULL;
5577 	  continue;
5578 	}
5579 
5580       if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
5581 	   || val == OMP_CLAUSE_DECL (c))
5582 	  && is_variable_sized (val))
5583 	continue;
5584       by_ref = use_pointer_for_field (val, NULL);
5585 
5586       switch (OMP_CLAUSE_CODE (c))
5587 	{
5588 	case OMP_CLAUSE_FIRSTPRIVATE:
5589 	  if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
5590 	      && !by_ref
5591 	      && is_task_ctx (ctx))
5592 	    TREE_NO_WARNING (var) = 1;
5593 	  do_in = true;
5594 	  break;
5595 
5596 	case OMP_CLAUSE_PRIVATE:
5597 	case OMP_CLAUSE_COPYIN:
5598 	case OMP_CLAUSE__LOOPTEMP_:
5599 	  do_in = true;
5600 	  break;
5601 
5602 	case OMP_CLAUSE_LASTPRIVATE:
5603 	  if (by_ref || omp_is_reference (val))
5604 	    {
5605 	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5606 		continue;
5607 	      do_in = true;
5608 	    }
5609 	  else
5610 	    {
5611 	      do_out = true;
5612 	      if (lang_hooks.decls.omp_private_outer_ref (val))
5613 		do_in = true;
5614 	    }
5615 	  break;
5616 
5617 	case OMP_CLAUSE_REDUCTION:
5618 	  do_in = true;
5619 	  if (val == OMP_CLAUSE_DECL (c))
5620 	    do_out = !(by_ref || omp_is_reference (val));
5621 	  else
5622 	    by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
5623 	  break;
5624 
5625 	default:
5626 	  gcc_unreachable ();
5627 	}
5628 
5629       if (do_in)
5630 	{
5631 	  ref = build_sender_ref (val, ctx);
5632 	  x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
5633 	  gimplify_assign (ref, x, ilist);
5634 	  if (is_task_ctx (ctx))
5635 	    DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
5636 	}
5637 
5638       if (do_out)
5639 	{
5640 	  ref = build_sender_ref (val, ctx);
5641 	  gimplify_assign (var, ref, olist);
5642 	}
5643     }
5644 }
5645 
5646 /* Generate code to implement SHARED from the sender (aka parent)
5647    side.  This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
5648    list things that got automatically shared.  */
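
/* Sketch (illustrative field names): for an automatically shared Y the
   parent either passes its address,

     .omp_data_o.y = &y;

   or copies the value in, and, unless Y is read-only or a by-reference
   RESULT/PARM decl, back out after the region:

     .omp_data_o.y = y;   ...   y = .omp_data_o.y;  */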
5649 
5650 static void
5651 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
5652 {
5653   tree var, ovar, nvar, t, f, x, record_type;
5654 
5655   if (ctx->record_type == NULL)
5656     return;
5657 
5658   record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
5659   for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
5660     {
5661       ovar = DECL_ABSTRACT_ORIGIN (f);
5662       if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
5663 	continue;
5664 
5665       nvar = maybe_lookup_decl (ovar, ctx);
5666       if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
5667 	continue;
5668 
5669       /* If CTX is a nested parallel directive, find the immediately
5670 	 enclosing parallel or workshare construct that contains a
5671 	 mapping for OVAR.  */
5672       var = lookup_decl_in_outer_ctx (ovar, ctx);
5673 
5674       t = omp_member_access_dummy_var (var);
5675       if (t)
5676 	{
5677 	  var = DECL_VALUE_EXPR (var);
5678 	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
5679 	  if (o != t)
5680 	    var = unshare_and_remap (var, t, o);
5681 	  else
5682 	    var = unshare_expr (var);
5683 	}
5684 
5685       if (use_pointer_for_field (ovar, ctx))
5686 	{
5687 	  x = build_sender_ref (ovar, ctx);
5688 	  var = build_fold_addr_expr (var);
5689 	  gimplify_assign (x, var, ilist);
5690 	}
5691       else
5692 	{
5693 	  x = build_sender_ref (ovar, ctx);
5694 	  gimplify_assign (x, var, ilist);
5695 
5696 	  if (!TREE_READONLY (var)
5697 	      /* We don't need to receive a new reference to a result
5698 	         or parm decl.  In fact we must not store to it, as that
5699 		 would invalidate any pending RSO and generate wrong gimple
5700 		 during inlining.  */
5701 	      && !((TREE_CODE (var) == RESULT_DECL
5702 		    || TREE_CODE (var) == PARM_DECL)
5703 		   && DECL_BY_REFERENCE (var)))
5704 	    {
5705 	      x = build_sender_ref (ovar, ctx);
5706 	      gimplify_assign (var, x, olist);
5707 	    }
5708 	}
5709     }
5710 }
5711 
5712 /* Emit an OpenACC head marker call, encapsulating the partitioning and
5713    other information that must be processed by the target compiler.
5714    Return the maximum number of dimensions the associated loop might
5715    be partitioned over.  */
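
/* E.g. (a sketch) "#pragma acc loop gang vector" in a parallel region
   produces

     ddvar = IFN_UNIQUE (OACC_HEAD_MARK, ddvar, 2, tag);

   where 2 is the level count and TAG encodes OLF_DIM_GANG,
   OLF_DIM_VECTOR and OLF_INDEPENDENT; the function returns 2.  */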
5716 
5717 static unsigned
5718 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
5719 		      gimple_seq *seq, omp_context *ctx)
5720 {
5721   unsigned levels = 0;
5722   unsigned tag = 0;
5723   tree gang_static = NULL_TREE;
5724   auto_vec<tree, 5> args;
5725 
5726   args.quick_push (build_int_cst
5727 		   (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
5728   args.quick_push (ddvar);
5729   for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
5730     {
5731       switch (OMP_CLAUSE_CODE (c))
5732 	{
5733 	case OMP_CLAUSE_GANG:
5734 	  tag |= OLF_DIM_GANG;
5735 	  gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
5736 	  /* static:* is represented by -1, and we can ignore it, as
5737 	     scheduling is always static.  */
5738 	  if (gang_static && integer_minus_onep (gang_static))
5739 	    gang_static = NULL_TREE;
5740 	  levels++;
5741 	  break;
5742 
5743 	case OMP_CLAUSE_WORKER:
5744 	  tag |= OLF_DIM_WORKER;
5745 	  levels++;
5746 	  break;
5747 
5748 	case OMP_CLAUSE_VECTOR:
5749 	  tag |= OLF_DIM_VECTOR;
5750 	  levels++;
5751 	  break;
5752 
5753 	case OMP_CLAUSE_SEQ:
5754 	  tag |= OLF_SEQ;
5755 	  break;
5756 
5757 	case OMP_CLAUSE_AUTO:
5758 	  tag |= OLF_AUTO;
5759 	  break;
5760 
5761 	case OMP_CLAUSE_INDEPENDENT:
5762 	  tag |= OLF_INDEPENDENT;
5763 	  break;
5764 
5765 	case OMP_CLAUSE_TILE:
5766 	  tag |= OLF_TILE;
5767 	  break;
5768 
5769 	default:
5770 	  continue;
5771 	}
5772     }
5773 
5774   if (gang_static)
5775     {
5776       if (DECL_P (gang_static))
5777 	gang_static = build_outer_var_ref (gang_static, ctx);
5778       tag |= OLF_GANG_STATIC;
5779     }
5780 
5781   /* In a parallel region, loops are implicitly INDEPENDENT.  */
5782   omp_context *tgt = enclosing_target_ctx (ctx);
5783   if (!tgt || is_oacc_parallel (tgt))
5784     tag |= OLF_INDEPENDENT;
5785 
5786   if (tag & OLF_TILE)
5787     /* Tiling could use all 3 levels.  */
5788     levels = 3;
5789   else
5790     {
5791       /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
5792 	 Ensure at least one level, or 2 for possible auto
5793 	 partitioning.  */
5794       bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
5795 				  << OLF_DIM_BASE) | OLF_SEQ));
5796 
5797       if (levels < 1u + maybe_auto)
5798 	levels = 1u + maybe_auto;
5799     }
5800 
5801   args.quick_push (build_int_cst (integer_type_node, levels));
5802   args.quick_push (build_int_cst (integer_type_node, tag));
5803   if (gang_static)
5804     args.quick_push (gang_static);
5805 
5806   gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
5807   gimple_set_location (call, loc);
5808   gimple_set_lhs (call, ddvar);
5809   gimple_seq_add_stmt (seq, call);
5810 
5811   return levels;
5812 }
5813 
5814 /* Emit an OpenACC loop head or tail marker to SEQ.  TOFOLLOW is the
5815    partitioning level of the enclosed region, if any.  */
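
/* I.e. (sketch) each marker is an internal-function call of the form

     ddvar = IFN_UNIQUE (OACC_HEAD_MARK or OACC_TAIL_MARK, ddvar
			 [, tofollow]);  */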
5816 
5817 static void
5818 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
5819 			tree tofollow, gimple_seq *seq)
5820 {
5821   int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
5822 		     : IFN_UNIQUE_OACC_TAIL_MARK);
5823   tree marker = build_int_cst (integer_type_node, marker_kind);
5824   int nargs = 2 + (tofollow != NULL_TREE);
5825   gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
5826 					    marker, ddvar, tofollow);
5827   gimple_set_location (call, loc);
5828   gimple_set_lhs (call, ddvar);
5829   gimple_seq_add_stmt (seq, call);
5830 }
5831 
5832 /* Generate the before and after OpenACC loop sequences.  CLAUSES are
5833    the loop clauses, from which we extract reductions.  Initialize
5834    HEAD and TAIL.  */
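
/* E.g. with two partitioning levels the result is shaped roughly as

     HEAD:  ddvar = 0; head-mark; FORK(outer); head-mark(1); FORK(inner);
	    head-mark(end)
     TAIL:  tail-mark(2); JOIN(inner); tail-mark(1); JOIN(outer);
	    tail-mark(end)

   with each level's reduction SETUP/INIT around its fork and its
   FINI/TEARDOWN around its join, from lower_oacc_reductions.  */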
5835 
5836 static void
5837 lower_oacc_head_tail (location_t loc, tree clauses,
5838 		      gimple_seq *head, gimple_seq *tail, omp_context *ctx)
5839 {
5840   bool inner = false;
5841   tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
5842   gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
5843 
5844   unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
5845   tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
5846   tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
5847 
5848   gcc_assert (count);
5849   for (unsigned done = 1; count; count--, done++)
5850     {
5851       gimple_seq fork_seq = NULL;
5852       gimple_seq join_seq = NULL;
5853 
5854       tree place = build_int_cst (integer_type_node, -1);
5855       gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
5856 						fork_kind, ddvar, place);
5857       gimple_set_location (fork, loc);
5858       gimple_set_lhs (fork, ddvar);
5859 
5860       gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
5861 						join_kind, ddvar, place);
5862       gimple_set_location (join, loc);
5863       gimple_set_lhs (join, ddvar);
5864 
5865       /* Mark the beginning of this level sequence.  */
5866       if (inner)
5867 	lower_oacc_loop_marker (loc, ddvar, true,
5868 				build_int_cst (integer_type_node, count),
5869 				&fork_seq);
5870       lower_oacc_loop_marker (loc, ddvar, false,
5871 			      build_int_cst (integer_type_node, done),
5872 			      &join_seq);
5873 
5874       lower_oacc_reductions (loc, clauses, place, inner,
5875 			     fork, join, &fork_seq, &join_seq, ctx);
5876 
5877       /* Append this level to head.  */
5878       gimple_seq_add_seq (head, fork_seq);
5879       /* Prepend it to tail.  */
5880       gimple_seq_add_seq (&join_seq, *tail);
5881       *tail = join_seq;
5882 
5883       inner = true;
5884     }
5885 
5886   /* Mark the end of the sequence.  */
5887   lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
5888   lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
5889 }
5890 
5891 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
5892    catch handler and return it.  This prevents programs from violating the
5893    structured block semantics with throws.  */
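
/* In source-level terms the wrapping is, as a sketch,

     try { BODY } catch (...) { <MUST_NOT_THROW: cleanup or trap> }

   i.e. a GIMPLE_TRY_CATCH whose handler is a GIMPLE_EH_MUST_NOT_THROW
   around the language's EH cleanup action, or __builtin_trap when the
   language provides none.  */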
5894 
5895 static gimple_seq
5896 maybe_catch_exception (gimple_seq body)
5897 {
5898   gimple *g;
5899   tree decl;
5900 
5901   if (!flag_exceptions)
5902     return body;
5903 
5904   if (lang_hooks.eh_protect_cleanup_actions != NULL)
5905     decl = lang_hooks.eh_protect_cleanup_actions ();
5906   else
5907     decl = builtin_decl_explicit (BUILT_IN_TRAP);
5908 
5909   g = gimple_build_eh_must_not_throw (decl);
5910   g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
5911       			GIMPLE_TRY_CATCH);
5912 
5913  return gimple_seq_alloc_with_stmt (g);
5914 }
5915 
5916 
5917 /* Routines to lower OMP directives into OMP-GIMPLE.  */
5918 
5919 /* If ctx is a worksharing context inside of a cancellable parallel
5920    region and it isn't nowait, add an lhs to its GIMPLE_OMP_RETURN
5921    and a conditional branch to the parallel's cancel_label to handle
5922    cancellation in the implicit barrier.  */
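
/* I.e. (sketch) the trailing

     GIMPLE_OMP_RETURN

   becomes

     lhs = GIMPLE_OMP_RETURN;
     if (lhs != 0) goto <parallel's cancel_label>;
     fallthru_label:;  */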
5923 
5924 static void
5925 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple_seq *body)
5926 {
5927   gimple *omp_return = gimple_seq_last_stmt (*body);
5928   gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
5929   if (gimple_omp_return_nowait_p (omp_return))
5930     return;
5931   if (ctx->outer
5932       && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_PARALLEL
5933       && ctx->outer->cancellable)
5934     {
5935       tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
5936       tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
5937       tree lhs = create_tmp_var (c_bool_type);
5938       gimple_omp_return_set_lhs (omp_return, lhs);
5939       tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
5940       gimple *g = gimple_build_cond (NE_EXPR, lhs,
5941 				    fold_convert (c_bool_type,
5942 						  boolean_false_node),
5943 				    ctx->outer->cancel_label, fallthru_label);
5944       gimple_seq_add_stmt (body, g);
5945       gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
5946     }
5947 }
5948 
5949 /* Lower the OpenMP sections directive in the current statement in GSI_P.
5950    CTX is the enclosing OMP context for the current statement.  */
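
/* The lowered shape is roughly (a sketch)

     <firstprivate/private setup from lower_rec_input_clauses>
     GIMPLE_OMP_SECTIONS <control var .section>
     GIMPLE_OMP_SECTIONS_SWITCH
     { each section body followed by GIMPLE_OMP_RETURN, with the
       lastprivate copy-out appended to the last section }
     GIMPLE_OMP_CONTINUE (.section, .section)
     <reduction merges> <destructors>
     GIMPLE_OMP_RETURN [nowait]  */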
5951 
5952 static void
5953 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
5954 {
5955   tree block, control;
5956   gimple_stmt_iterator tgsi;
5957   gomp_sections *stmt;
5958   gimple *t;
5959   gbind *new_stmt, *bind;
5960   gimple_seq ilist, dlist, olist, new_body;
5961 
5962   stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
5963 
5964   push_gimplify_context ();
5965 
5966   dlist = NULL;
5967   ilist = NULL;
5968   lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
5969       			   &ilist, &dlist, ctx, NULL);
5970 
5971   new_body = gimple_omp_body (stmt);
5972   gimple_omp_set_body (stmt, NULL);
5973   tgsi = gsi_start (new_body);
5974   for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
5975     {
5976       omp_context *sctx;
5977       gimple *sec_start;
5978 
5979       sec_start = gsi_stmt (tgsi);
5980       sctx = maybe_lookup_ctx (sec_start);
5981       gcc_assert (sctx);
5982 
5983       lower_omp (gimple_omp_body_ptr (sec_start), sctx);
5984       gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
5985 			    GSI_CONTINUE_LINKING);
5986       gimple_omp_set_body (sec_start, NULL);
5987 
5988       if (gsi_one_before_end_p (tgsi))
5989 	{
5990 	  gimple_seq l = NULL;
5991 	  lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
5992 				     &l, ctx);
5993 	  gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
5994 	  gimple_omp_section_set_last (sec_start);
5995 	}
5996 
5997       gsi_insert_after (&tgsi, gimple_build_omp_return (false),
5998 			GSI_CONTINUE_LINKING);
5999     }
6000 
6001   block = make_node (BLOCK);
6002   bind = gimple_build_bind (NULL, new_body, block);
6003 
6004   olist = NULL;
6005   lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist, ctx);
6006 
6007   block = make_node (BLOCK);
6008   new_stmt = gimple_build_bind (NULL, NULL, block);
6009   gsi_replace (gsi_p, new_stmt, true);
6010 
6011   pop_gimplify_context (new_stmt);
6012   gimple_bind_append_vars (new_stmt, ctx->block_vars);
6013   BLOCK_VARS (block) = gimple_bind_vars (bind);
6014   if (BLOCK_VARS (block))
6015     TREE_USED (block) = 1;
6016 
6017   new_body = NULL;
6018   gimple_seq_add_seq (&new_body, ilist);
6019   gimple_seq_add_stmt (&new_body, stmt);
6020   gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
6021   gimple_seq_add_stmt (&new_body, bind);
6022 
6023   control = create_tmp_var (unsigned_type_node, ".section");
6024   t = gimple_build_omp_continue (control, control);
6025   gimple_omp_sections_set_control (stmt, control);
6026   gimple_seq_add_stmt (&new_body, t);
6027 
6028   gimple_seq_add_seq (&new_body, olist);
6029   if (ctx->cancellable)
6030     gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
6031   gimple_seq_add_seq (&new_body, dlist);
6032 
6033   new_body = maybe_catch_exception (new_body);
6034 
6035   bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
6036 				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
6037   t = gimple_build_omp_return (nowait);
6038   gimple_seq_add_stmt (&new_body, t);
6039   maybe_add_implicit_barrier_cancel (ctx, &new_body);
6040 
6041   gimple_bind_set_body (new_stmt, new_body);
6042 }
6043 
6044 
6045 /* A subroutine of lower_omp_single.  Expand the simple form of
6046    a GIMPLE_OMP_SINGLE, without a copyprivate clause:
6047 
6048      	if (GOMP_single_start ())
6049 	  BODY;
6050 	[ GOMP_barrier (); ]	-> unless 'nowait' is present.
6051 
6052   FIXME.  It may be better to delay expanding the logic of this until
6053   pass_expand_omp.  The expanded logic may make the job more difficult
6054   for a synchronization analysis pass.  */
6055 
6056 static void
6057 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
6058 {
6059   location_t loc = gimple_location (single_stmt);
6060   tree tlabel = create_artificial_label (loc);
6061   tree flabel = create_artificial_label (loc);
6062   gimple *call, *cond;
6063   tree lhs, decl;
6064 
6065   decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
6066   lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
6067   call = gimple_build_call (decl, 0);
6068   gimple_call_set_lhs (call, lhs);
6069   gimple_seq_add_stmt (pre_p, call);
6070 
6071   cond = gimple_build_cond (EQ_EXPR, lhs,
6072 			    fold_convert_loc (loc, TREE_TYPE (lhs),
6073 					      boolean_true_node),
6074 			    tlabel, flabel);
6075   gimple_seq_add_stmt (pre_p, cond);
6076   gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
6077   gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
6078   gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
6079 }
6080 
6081 
6082 /* A subroutine of lower_omp_single.  Expand the form of
6083    a GIMPLE_OMP_SINGLE that has a copyprivate clause:
6084 
6085 	#pragma omp single copyprivate (a, b, c)
6086 
6087    Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
6088 
6089       {
6090 	if ((copyout_p = GOMP_single_copy_start ()) == NULL)
6091 	  {
6092 	    BODY;
6093 	    copyout.a = a;
6094 	    copyout.b = b;
6095 	    copyout.c = c;
6096 	    GOMP_single_copy_end (&copyout);
6097 	  }
6098 	else
6099 	  {
6100 	    a = copyout_p->a;
6101 	    b = copyout_p->b;
6102 	    c = copyout_p->c;
6103 	  }
6104 	GOMP_barrier ();
6105       }
6106 
6107   FIXME.  It may be better to delay expanding the logic of this until
6108   pass_expand_omp.  The expanded logic may make the job more difficult
6109   for a synchronization analysis pass.  */
6110 
6111 static void
6112 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
6113 		       omp_context *ctx)
6114 {
6115   tree ptr_type, t, l0, l1, l2, bfn_decl;
6116   gimple_seq copyin_seq;
6117   location_t loc = gimple_location (single_stmt);
6118 
6119   ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
6120 
6121   ptr_type = build_pointer_type (ctx->record_type);
6122   ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
6123 
6124   l0 = create_artificial_label (loc);
6125   l1 = create_artificial_label (loc);
6126   l2 = create_artificial_label (loc);
6127 
6128   bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
6129   t = build_call_expr_loc (loc, bfn_decl, 0);
6130   t = fold_convert_loc (loc, ptr_type, t);
6131   gimplify_assign (ctx->receiver_decl, t, pre_p);
6132 
6133   t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
6134 	      build_int_cst (ptr_type, 0));
6135   t = build3 (COND_EXPR, void_type_node, t,
6136 	      build_and_jump (&l0), build_and_jump (&l1));
6137   gimplify_and_add (t, pre_p);
6138 
6139   gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
6140 
6141   gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
6142 
6143   copyin_seq = NULL;
6144   lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
6145 			      &copyin_seq, ctx);
6146 
6147   t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
6148   bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
6149   t = build_call_expr_loc (loc, bfn_decl, 1, t);
6150   gimplify_and_add (t, pre_p);
6151 
6152   t = build_and_jump (&l2);
6153   gimplify_and_add (t, pre_p);
6154 
6155   gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
6156 
6157   gimple_seq_add_seq (pre_p, copyin_seq);
6158 
6159   gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
6160 }
6161 
6162 
6163 /* Expand code for an OpenMP single directive.  */
6164 
6165 static void
6166 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6167 {
6168   tree block;
6169   gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
6170   gbind *bind;
6171   gimple_seq bind_body, bind_body_tail = NULL, dlist;
6172 
6173   push_gimplify_context ();
6174 
6175   block = make_node (BLOCK);
6176   bind = gimple_build_bind (NULL, NULL, block);
6177   gsi_replace (gsi_p, bind, true);
6178   bind_body = NULL;
6179   dlist = NULL;
6180   lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
6181 			   &bind_body, &dlist, ctx, NULL);
6182   lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
6183 
6184   gimple_seq_add_stmt (&bind_body, single_stmt);
6185 
6186   if (ctx->record_type)
6187     lower_omp_single_copy (single_stmt, &bind_body, ctx);
6188   else
6189     lower_omp_single_simple (single_stmt, &bind_body);
6190 
6191   gimple_omp_set_body (single_stmt, NULL);
6192 
6193   gimple_seq_add_seq (&bind_body, dlist);
6194 
6195   bind_body = maybe_catch_exception (bind_body);
6196 
6197   bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
6198 				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
6199   gimple *g = gimple_build_omp_return (nowait);
6200   gimple_seq_add_stmt (&bind_body_tail, g);
6201   maybe_add_implicit_barrier_cancel (ctx, &bind_body_tail);
6202   if (ctx->record_type)
6203     {
6204       gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
6205       tree clobber = build_constructor (ctx->record_type, NULL);
6206       TREE_THIS_VOLATILE (clobber) = 1;
6207       gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
6208 						   clobber), GSI_SAME_STMT);
6209     }
6210   gimple_seq_add_seq (&bind_body, bind_body_tail);
6211   gimple_bind_set_body (bind, bind_body);
6212 
6213   pop_gimplify_context (bind);
6214 
6215   gimple_bind_append_vars (bind, ctx->block_vars);
6216   BLOCK_VARS (block) = ctx->block_vars;
6217   if (BLOCK_VARS (block))
6218     TREE_USED (block) = 1;
6219 }
6220 
6221 
6222 /* Expand code for an OpenMP master directive.  */
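
/* I.e. (sketch) the master body is guarded by a thread-number test:

     if (omp_get_thread_num () != 0) goto lab;
     BODY;
     lab:
     GIMPLE_OMP_RETURN (nowait)  */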
6223 
6224 static void
6225 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6226 {
6227   tree block, lab = NULL, x, bfn_decl;
6228   gimple *stmt = gsi_stmt (*gsi_p);
6229   gbind *bind;
6230   location_t loc = gimple_location (stmt);
6231   gimple_seq tseq;
6232 
6233   push_gimplify_context ();
6234 
6235   block = make_node (BLOCK);
6236   bind = gimple_build_bind (NULL, NULL, block);
6237   gsi_replace (gsi_p, bind, true);
6238   gimple_bind_add_stmt (bind, stmt);
6239 
6240   bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
6241   x = build_call_expr_loc (loc, bfn_decl, 0);
6242   x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
6243   x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
6244   tseq = NULL;
6245   gimplify_and_add (x, &tseq);
6246   gimple_bind_add_seq (bind, tseq);
6247 
6248   lower_omp (gimple_omp_body_ptr (stmt), ctx);
6249   gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6250   gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6251   gimple_omp_set_body (stmt, NULL);
6252 
6253   gimple_bind_add_stmt (bind, gimple_build_label (lab));
6254 
6255   gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6256 
6257   pop_gimplify_context (bind);
6258 
6259   gimple_bind_append_vars (bind, ctx->block_vars);
6260   BLOCK_VARS (block) = ctx->block_vars;
6261 }
6262 
6263 
6264 /* Expand code for an OpenMP taskgroup directive.  */
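
/* Sketch of the result:

     GOMP_taskgroup_start ();
     BODY;
     GIMPLE_OMP_RETURN (nowait)

   where BODY already contains a try/finally calling GOMP_taskgroup_end,
   added during gimplification.  */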
6265 
6266 static void
6267 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6268 {
6269   gimple *stmt = gsi_stmt (*gsi_p);
6270   gcall *x;
6271   gbind *bind;
6272   tree block = make_node (BLOCK);
6273 
6274   bind = gimple_build_bind (NULL, NULL, block);
6275   gsi_replace (gsi_p, bind, true);
6276   gimple_bind_add_stmt (bind, stmt);
6277 
6278   x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
6279 			 0);
6280   gimple_bind_add_stmt (bind, x);
6281 
6282   lower_omp (gimple_omp_body_ptr (stmt), ctx);
6283   gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6284   gimple_omp_set_body (stmt, NULL);
6285 
6286   gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6287 
6288   gimple_bind_append_vars (bind, ctx->block_vars);
6289   BLOCK_VARS (block) = ctx->block_vars;
6290 }
6291 
6292 
6293 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible.  */
6294 
6295 static void
6296 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
6297 			   omp_context *ctx)
6298 {
6299   struct omp_for_data fd;
6300   if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
6301     return;
6302 
6303   unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
6304   struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
6305   omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
6306   if (!fd.ordered)
6307     return;
6308 
6309   tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6310   tree c = gimple_omp_ordered_clauses (ord_stmt);
6311   if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
6312       && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
6313     {
6314       /* Merge depend clauses from multiple adjacent
6315 	 #pragma omp ordered depend(sink:...) constructs
6316 	 into one #pragma omp ordered depend(sink:...), so that
6317 	 we can optimize them together.  */
6318       gimple_stmt_iterator gsi = *gsi_p;
6319       gsi_next (&gsi);
6320       while (!gsi_end_p (gsi))
6321 	{
6322 	  gimple *stmt = gsi_stmt (gsi);
6323 	  if (is_gimple_debug (stmt)
6324 	      || gimple_code (stmt) == GIMPLE_NOP)
6325 	    {
6326 	      gsi_next (&gsi);
6327 	      continue;
6328 	    }
6329 	  if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
6330 	    break;
6331 	  gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
6332 	  c = gimple_omp_ordered_clauses (ord_stmt2);
6333 	  if (c == NULL_TREE
6334 	      || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
6335 	      || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6336 	    break;
6337 	  while (*list_p)
6338 	    list_p = &OMP_CLAUSE_CHAIN (*list_p);
6339 	  *list_p = c;
6340 	  gsi_remove (&gsi, true);
6341 	}
6342     }
6343 
6344   /* Canonicalize sink dependence clauses into one folded clause if
6345      possible.
6346 
6347      The basic algorithm is to create a sink vector whose first
6348      element is the GCD of all the first elements, and whose remaining
6349      elements are the minimum of the subsequent columns.
6350 
6351      We ignore dependence vectors whose first element is zero because
6352      such dependencies are known to be executed by the same thread.
6353 
6354      We take into account the direction of the loop, so a minimum
6355      becomes a maximum if the loop is iterating forwards.  We also
6356      ignore sink clauses where the loop direction is unknown, or where
6357      the offsets are clearly invalid because they are not a multiple
6358      of the loop increment.
6359 
6360      For example:
6361 
6362 	#pragma omp for ordered(2)
6363 	for (i=0; i < N; ++i)
6364 	  for (j=0; j < M; ++j)
6365 	    {
6366 	      #pragma omp ordered \
6367 		depend(sink:i-8,j-2) \
6368 		depend(sink:i,j-1) \	// Completely ignored because i+0.
6369 		depend(sink:i-4,j-3) \
6370 		depend(sink:i-6,j-4)
6371 	      #pragma omp ordered depend(source)
6372 	    }
6373 
6374      Folded clause is:
6375 
6376 	depend(sink:-gcd(8,4,6),-min(2,3,4))
6377 	  -or-
6378 	depend(sink:-2,-2)
6379   */
6380 
6381   /* FIXME: Computing GCDs where the first element is zero is
6382      non-trivial in the presence of collapsed loops.  Do this later.  */
6383   if (fd.collapse > 1)
6384     return;
6385 
6386   wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
6387   memset (folded_deps, 0, sizeof (*folded_deps) * (2 * len - 1));
6388   tree folded_dep = NULL_TREE;
6389   /* TRUE if the first dimension's offset is negative.  */
6390   bool neg_offset_p = false;
6391 
6392   list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6393   unsigned int i;
6394   while ((c = *list_p) != NULL)
6395     {
6396       bool remove = false;
6397 
6398       gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
6399       if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6400 	goto next_ordered_clause;
6401 
6402       tree vec;
6403       for (vec = OMP_CLAUSE_DECL (c), i = 0;
6404 	   vec && TREE_CODE (vec) == TREE_LIST;
6405 	   vec = TREE_CHAIN (vec), ++i)
6406 	{
6407 	  gcc_assert (i < len);
6408 
6409 	  /* omp_extract_for_data has canonicalized the condition.  */
6410 	  gcc_assert (fd.loops[i].cond_code == LT_EXPR
6411 		      || fd.loops[i].cond_code == GT_EXPR);
6412 	  bool forward = fd.loops[i].cond_code == LT_EXPR;
6413 	  bool maybe_lexically_later = true;
6414 
6415 	  /* While the committee makes up its mind, bail if we have any
6416 	     non-constant steps.  */
6417 	  if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
6418 	    goto lower_omp_ordered_ret;
6419 
6420 	  tree itype = TREE_TYPE (TREE_VALUE (vec));
6421 	  if (POINTER_TYPE_P (itype))
6422 	    itype = sizetype;
6423 	  wide_int offset = wide_int::from (TREE_PURPOSE (vec),
6424 					    TYPE_PRECISION (itype),
6425 					    TYPE_SIGN (itype));
6426 
6427 	  /* Ignore invalid offsets that are not multiples of the step.  */
6428 	  if (!wi::multiple_of_p
6429 	      (wi::abs (offset), wi::abs ((wide_int) fd.loops[i].step),
6430 	       UNSIGNED))
6431 	    {
6432 	      warning_at (OMP_CLAUSE_LOCATION (c), 0,
6433 			  "ignoring sink clause with offset that is not "
6434 			  "a multiple of the loop step");
6435 	      remove = true;
6436 	      goto next_ordered_clause;
6437 	    }
6438 
6439 	  /* Calculate the first dimension.  The first dimension of
6440 	     the folded dependency vector is the GCD of the first
6441 	     elements, while ignoring any first elements whose offset
6442 	     is 0.  */
6443 	  if (i == 0)
6444 	    {
6445 	      /* Ignore dependence vectors whose first dimension is 0.  */
6446 	      if (offset == 0)
6447 		{
6448 		  remove = true;
6449 		  goto next_ordered_clause;
6450 		}
6451 	      else
6452 		{
6453 		  if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
6454 		    {
6455 		      error_at (OMP_CLAUSE_LOCATION (c),
6456 				"first offset must be in opposite direction "
6457 				"of loop iterations");
6458 		      goto lower_omp_ordered_ret;
6459 		    }
6460 		  if (forward)
6461 		    offset = -offset;
6462 		  neg_offset_p = forward;
6463 		  /* Initialize the first time around.  */
6464 		  if (folded_dep == NULL_TREE)
6465 		    {
6466 		      folded_dep = c;
6467 		      folded_deps[0] = offset;
6468 		    }
6469 		  else
6470 		    folded_deps[0] = wi::gcd (folded_deps[0],
6471 					      offset, UNSIGNED);
6472 		}
6473 	    }
6474 	  /* Calculate minimum for the remaining dimensions.  */
6475 	  else
6476 	    {
6477 	      folded_deps[len + i - 1] = offset;
6478 	      if (folded_dep == c)
6479 		folded_deps[i] = offset;
6480 	      else if (maybe_lexically_later
6481 		       && !wi::eq_p (folded_deps[i], offset))
6482 		{
6483 		  if (forward ^ wi::gts_p (folded_deps[i], offset))
6484 		    {
6485 		      unsigned int j;
6486 		      folded_dep = c;
6487 		      for (j = 1; j <= i; j++)
6488 			folded_deps[j] = folded_deps[len + j - 1];
6489 		    }
6490 		  else
6491 		    maybe_lexically_later = false;
6492 		}
6493 	    }
6494 	}
6495       gcc_assert (i == len);
6496 
6497       remove = true;
6498 
6499     next_ordered_clause:
6500       if (remove)
6501 	*list_p = OMP_CLAUSE_CHAIN (c);
6502       else
6503 	list_p = &OMP_CLAUSE_CHAIN (c);
6504     }
6505 
6506   if (folded_dep)
6507     {
6508       if (neg_offset_p)
6509 	folded_deps[0] = -folded_deps[0];
6510 
6511       tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
6512       if (POINTER_TYPE_P (itype))
6513 	itype = sizetype;
6514 
6515       TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
6516 	= wide_int_to_tree (itype, folded_deps[0]);
6517       OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
6518       *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
6519     }
6520 
6521  lower_omp_ordered_ret:
6522 
6523   /* An ordered construct without clauses means #pragma omp ordered
6524      threads, whereas we want a nop if we have removed all clauses.  */
6525   if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
6526     gsi_replace (gsi_p, gimple_build_nop (), true);
6527 }
6528 
6529 
6530 /* Lower code for an OpenMP ordered directive.  */
6531 
6532 static void
6533 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6534 {
6535   tree block;
6536   gimple *stmt = gsi_stmt (*gsi_p), *g;
6537   gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
6538   gcall *x;
6539   gbind *bind;
6540   bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6541 			       OMP_CLAUSE_SIMD);
6542   /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
6543      loop.  */
6544   bool maybe_simt
6545     = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
6546   bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6547 				  OMP_CLAUSE_THREADS);
6548 
6549   if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6550 		       OMP_CLAUSE_DEPEND))
6551     {
6552 	/* FIXME: This needs to be moved to the expansion to verify various
6553 	 conditions only testable on a cfg with dominators computed, and
6554 	 also all the depend clauses to be merged might still need to be
6555 	 available for the runtime checks.  */
6556       if (0)
6557 	lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
6558       return;
6559     }
6560 
6561   push_gimplify_context ();
6562 
6563   block = make_node (BLOCK);
6564   bind = gimple_build_bind (NULL, NULL, block);
6565   gsi_replace (gsi_p, bind, true);
6566   gimple_bind_add_stmt (bind, stmt);
6567 
6568   if (simd)
6569     {
6570       x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
6571 				      build_int_cst (NULL_TREE, threads));
6572       cfun->has_simduid_loops = true;
6573     }
6574   else
6575     x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
6576 			   0);
6577   gimple_bind_add_stmt (bind, x);
6578 
6579   tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
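  /* In the SIMT case the generated code is schematically (a sketch,
     not the exact GIMPLE):

	 counter = GOMP_SIMT_LANE ();
       body:
	 if (GOMP_SIMT_ORDERED_PRED (counter) == 0)
	   <ordered body>;
       test:
	 counter = counter - 1;
	 if (GOMP_SIMT_VOTE_ANY (counter >= 0))
	   goto body;
       end:

     so the SIMT lanes execute the ordered body one at a time, in lane
     order.  */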
6580   if (maybe_simt)
6581     {
6582       counter = create_tmp_var (integer_type_node);
6583       g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
6584       gimple_call_set_lhs (g, counter);
6585       gimple_bind_add_stmt (bind, g);
6586 
6587       body = create_artificial_label (UNKNOWN_LOCATION);
6588       test = create_artificial_label (UNKNOWN_LOCATION);
6589       gimple_bind_add_stmt (bind, gimple_build_label (body));
6590 
6591       tree simt_pred = create_tmp_var (integer_type_node);
6592       g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
6593       gimple_call_set_lhs (g, simt_pred);
6594       gimple_bind_add_stmt (bind, g);
6595 
6596       tree t = create_artificial_label (UNKNOWN_LOCATION);
6597       g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
6598       gimple_bind_add_stmt (bind, g);
6599 
6600       gimple_bind_add_stmt (bind, gimple_build_label (t));
6601     }
6602   lower_omp (gimple_omp_body_ptr (stmt), ctx);
6603   gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6604   gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6605   gimple_omp_set_body (stmt, NULL);
6606 
6607   if (maybe_simt)
6608     {
6609       gimple_bind_add_stmt (bind, gimple_build_label (test));
6610       g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
6611       gimple_bind_add_stmt (bind, g);
6612 
6613       tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
6614       tree nonneg = create_tmp_var (integer_type_node);
6615       gimple_seq tseq = NULL;
6616       gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
6617       gimple_bind_add_seq (bind, tseq);
6618 
6619       g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
6620       gimple_call_set_lhs (g, nonneg);
6621       gimple_bind_add_stmt (bind, g);
6622 
6623       tree end = create_artificial_label (UNKNOWN_LOCATION);
6624       g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
6625       gimple_bind_add_stmt (bind, g);
6626 
6627       gimple_bind_add_stmt (bind, gimple_build_label (end));
6628     }
6629   if (simd)
6630     x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
6631 				    build_int_cst (NULL_TREE, threads));
6632   else
6633     x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
6634 			   0);
6635   gimple_bind_add_stmt (bind, x);
6636 
6637   gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6638 
6639   pop_gimplify_context (bind);
6640 
6641   gimple_bind_append_vars (bind, ctx->block_vars);
6642   BLOCK_VARS (block) = gimple_bind_vars (bind);
6643 }
6644 
6645 
6646 /* Gimplify a GIMPLE_OMP_CRITICAL statement.  This is a relatively simple
6647    substitution of a couple of function calls.  But in the NAMED case,
6648    it requires that the languages coordinate on a symbol name.  It is
6649    therefore best put here in common code.  */
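/* For instance, '#pragma omp critical (foo)' is lowered schematically to

     GOMP_critical_name_start (&.gomp_critical_user_foo);
     <body>
     GOMP_critical_name_end (&.gomp_critical_user_foo);

   where the mutex variable is emitted as a common symbol, so that all
   translation units using the same name share one lock.  */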
6650 
6651 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
6652 
6653 static void
6654 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6655 {
6656   tree block;
6657   tree name, lock, unlock;
6658   gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
6659   gbind *bind;
6660   location_t loc = gimple_location (stmt);
6661   gimple_seq tbody;
6662 
6663   name = gimple_omp_critical_name (stmt);
6664   if (name)
6665     {
6666       tree decl;
6667 
6668       if (!critical_name_mutexes)
6669 	critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
6670 
6671       tree *n = critical_name_mutexes->get (name);
6672       if (n == NULL)
6673 	{
6674 	  char *new_str;
6675 
6676 	  decl = create_tmp_var_raw (ptr_type_node);
6677 
6678 	  new_str = ACONCAT ((".gomp_critical_user_",
6679 			      IDENTIFIER_POINTER (name), NULL));
6680 	  DECL_NAME (decl) = get_identifier (new_str);
6681 	  TREE_PUBLIC (decl) = 1;
6682 	  TREE_STATIC (decl) = 1;
6683 	  DECL_COMMON (decl) = 1;
6684 	  DECL_ARTIFICIAL (decl) = 1;
6685 	  DECL_IGNORED_P (decl) = 1;
6686 
6687 	  varpool_node::finalize_decl (decl);
6688 
6689 	  critical_name_mutexes->put (name, decl);
6690 	}
6691       else
6692 	decl = *n;
6693 
6694       /* If '#pragma omp critical' is inside an offloaded region or
6695 	 inside a function marked as offloadable, the symbol must be
6696 	 marked as offloadable too.  */
6697       omp_context *octx;
6698       if (cgraph_node::get (current_function_decl)->offloadable)
6699 	varpool_node::get_create (decl)->offloadable = 1;
6700       else
6701 	for (octx = ctx->outer; octx; octx = octx->outer)
6702 	  if (is_gimple_omp_offloaded (octx->stmt))
6703 	    {
6704 	      varpool_node::get_create (decl)->offloadable = 1;
6705 	      break;
6706 	    }
6707 
6708       lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
6709       lock = build_call_expr_loc (loc, lock, 1,
6710 				  build_fold_addr_expr_loc (loc, decl));
6711 
6712       unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
6713       unlock = build_call_expr_loc (loc, unlock, 1,
6714 				build_fold_addr_expr_loc (loc, decl));
6715     }
6716   else
6717     {
6718       lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
6719       lock = build_call_expr_loc (loc, lock, 0);
6720 
6721       unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
6722       unlock = build_call_expr_loc (loc, unlock, 0);
6723     }
6724 
6725   push_gimplify_context ();
6726 
6727   block = make_node (BLOCK);
6728   bind = gimple_build_bind (NULL, NULL, block);
6729   gsi_replace (gsi_p, bind, true);
6730   gimple_bind_add_stmt (bind, stmt);
6731 
6732   tbody = gimple_bind_body (bind);
6733   gimplify_and_add (lock, &tbody);
6734   gimple_bind_set_body (bind, tbody);
6735 
6736   lower_omp (gimple_omp_body_ptr (stmt), ctx);
6737   gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6738   gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6739   gimple_omp_set_body (stmt, NULL);
6740 
6741   tbody = gimple_bind_body (bind);
6742   gimplify_and_add (unlock, &tbody);
6743   gimple_bind_set_body (bind, tbody);
6744 
6745   gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6746 
6747   pop_gimplify_context (bind);
6748   gimple_bind_append_vars (bind, ctx->block_vars);
6749   BLOCK_VARS (block) = gimple_bind_vars (bind);
6750 }
6751 
6752 /* A subroutine of lower_omp_for.  Generate code to emit the predicate
6753    for a lastprivate clause.  Given a loop control predicate of (V
6754    cond N2), we gate the clause on (!(V cond N2)).  The lowered form
6755    is appended to *DLIST, iterator initialization is appended to
6756    *BODY_P.  */
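/* For example (schematically): for '#pragma omp for lastprivate (x)'
   over (V = 0; V < N; V++), the lastprivate copy-out ends up guarded
   by 'if (V >= N)', or 'if (V == N)' when the unit step allows the
   strict-equality form used below.  */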
6757 
6758 static void
6759 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
6760 			   gimple_seq *dlist, struct omp_context *ctx)
6761 {
6762   tree clauses, cond, vinit;
6763   enum tree_code cond_code;
6764   gimple_seq stmts;
6765 
6766   cond_code = fd->loop.cond_code;
6767   cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
6768 
6769   /* When possible, use a strict equality expression.  This can let
6770      VRP-type optimizations deduce the value and remove a copy.  */
6771   if (tree_fits_shwi_p (fd->loop.step))
6772     {
6773       HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
6774       if (step == 1 || step == -1)
6775 	cond_code = EQ_EXPR;
6776     }
6777 
6778   if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
6779       || gimple_omp_for_grid_phony (fd->for_stmt))
6780     cond = omp_grid_lastprivate_predicate (fd);
6781   else
6782     {
6783       tree n2 = fd->loop.n2;
6784       if (fd->collapse > 1
6785 	  && TREE_CODE (n2) != INTEGER_CST
6786 	  && gimple_omp_for_combined_into_p (fd->for_stmt))
6787 	{
6788 	  struct omp_context *taskreg_ctx = NULL;
6789 	  if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
6790 	    {
6791 	      gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
6792 	      if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
6793 		  || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
6794 		{
6795 		  if (gimple_omp_for_combined_into_p (gfor))
6796 		    {
6797 		      gcc_assert (ctx->outer->outer
6798 				  && is_parallel_ctx (ctx->outer->outer));
6799 		      taskreg_ctx = ctx->outer->outer;
6800 		    }
6801 		  else
6802 		    {
6803 		      struct omp_for_data outer_fd;
6804 		      omp_extract_for_data (gfor, &outer_fd, NULL);
6805 		      n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
6806 		    }
6807 		}
6808 	      else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
6809 		taskreg_ctx = ctx->outer->outer;
6810 	    }
6811 	  else if (is_taskreg_ctx (ctx->outer))
6812 	    taskreg_ctx = ctx->outer;
6813 	  if (taskreg_ctx)
6814 	    {
6815 	      int i;
6816 	      tree taskreg_clauses
6817 		= gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
6818 	      tree innerc = omp_find_clause (taskreg_clauses,
6819 					     OMP_CLAUSE__LOOPTEMP_);
6820 	      gcc_assert (innerc);
6821 	      for (i = 0; i < fd->collapse; i++)
6822 		{
6823 		  innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6824 					    OMP_CLAUSE__LOOPTEMP_);
6825 		  gcc_assert (innerc);
6826 		}
6827 	      innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6828 					OMP_CLAUSE__LOOPTEMP_);
6829 	      if (innerc)
6830 		n2 = fold_convert (TREE_TYPE (n2),
6831 				   lookup_decl (OMP_CLAUSE_DECL (innerc),
6832 						taskreg_ctx));
6833 	    }
6834 	}
6835       cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
6836     }
6837 
6838   clauses = gimple_omp_for_clauses (fd->for_stmt);
6839   stmts = NULL;
6840   lower_lastprivate_clauses (clauses, cond, &stmts, ctx);
6841   if (!gimple_seq_empty_p (stmts))
6842     {
6843       gimple_seq_add_seq (&stmts, *dlist);
6844       *dlist = stmts;
6845 
6846       /* Optimize: v = 0; is usually cheaper than v = some_other_constant.  */
6847       vinit = fd->loop.n1;
6848       if (cond_code == EQ_EXPR
6849 	  && tree_fits_shwi_p (fd->loop.n2)
6850 	  && ! integer_zerop (fd->loop.n2))
6851 	vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
6852       else
6853 	vinit = unshare_expr (vinit);
6854 
6855       /* Initialize the iterator variable, so that threads that don't execute
6856 	 any iterations don't execute the lastprivate clauses by accident.  */
6857       gimplify_assign (fd->loop.v, vinit, body_p);
6858     }
6859 }
6860 
6861 
6862 /* Lower code for an OMP loop directive.  */
6863 
6864 static void
6865 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6866 {
6867   tree *rhs_p, block;
6868   struct omp_for_data fd, *fdp = NULL;
6869   gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
6870   gbind *new_stmt;
6871   gimple_seq omp_for_body, body, dlist;
6872   gimple_seq oacc_head = NULL, oacc_tail = NULL;
6873   size_t i;
6874 
6875   push_gimplify_context ();
6876 
6877   lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
6878 
6879   block = make_node (BLOCK);
6880   new_stmt = gimple_build_bind (NULL, NULL, block);
6881   /* Replace at gsi right away, so that 'stmt' is no longer a member
6882      of a sequence, as we're going to add it to a different
6883      one below.  */
6884   gsi_replace (gsi_p, new_stmt, true);
6885 
6886   /* Move the declarations of temporaries out of the loop body before
6887      we make it go away.  */
6888   omp_for_body = gimple_omp_body (stmt);
6889   if (!gimple_seq_empty_p (omp_for_body)
6890       && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
6891     {
6892       gbind *inner_bind
6893 	= as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
6894       tree vars = gimple_bind_vars (inner_bind);
6895       gimple_bind_append_vars (new_stmt, vars);
6896       /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, so don't
6897 	 keep them on the inner_bind and its block.  */
6898       gimple_bind_set_vars (inner_bind, NULL_TREE);
6899       if (gimple_bind_block (inner_bind))
6900 	BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
6901     }
6902 
6903   if (gimple_omp_for_combined_into_p (stmt))
6904     {
6905       omp_extract_for_data (stmt, &fd, NULL);
6906       fdp = &fd;
6907 
6908       /* We need two temporaries with fd.loop.v type (istart/iend)
6909 	 and then (fd.collapse - 1) temporaries with the same
6910 	 type for count2 ... countN-1 vars if not constant.  */
6911       size_t count = 2;
6912       tree type = fd.iter_type;
6913       if (fd.collapse > 1
6914 	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
6915 	count += fd.collapse - 1;
6916       bool taskreg_for
6917 	= (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
6918 	   || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
6919       tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
6920       tree simtc = NULL;
6921       tree clauses = *pc;
6922       if (taskreg_for)
6923 	outerc
6924 	  = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
6925 			     OMP_CLAUSE__LOOPTEMP_);
6926       if (ctx->simt_stmt)
6927 	simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
6928 				 OMP_CLAUSE__LOOPTEMP_);
6929       for (i = 0; i < count; i++)
6930 	{
6931 	  tree temp;
6932 	  if (taskreg_for)
6933 	    {
6934 	      gcc_assert (outerc);
6935 	      temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
6936 	      outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
6937 					OMP_CLAUSE__LOOPTEMP_);
6938 	    }
6939 	  else
6940 	    {
6941 	      /* If there are two adjacent SIMD stmts, one with a _simt_
6942 		 clause and another without, make sure they have the same
6943 		 decls in _looptemp_ clauses, because the outer stmt
6944 		 they are combined into will look up just one inner_stmt.  */
6945 	      if (ctx->simt_stmt)
6946 		temp = OMP_CLAUSE_DECL (simtc);
6947 	      else
6948 		temp = create_tmp_var (type);
6949 	      insert_decl_map (&ctx->outer->cb, temp, temp);
6950 	    }
6951 	  *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
6952 	  OMP_CLAUSE_DECL (*pc) = temp;
6953 	  pc = &OMP_CLAUSE_CHAIN (*pc);
6954 	  if (ctx->simt_stmt)
6955 	    simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
6956 				     OMP_CLAUSE__LOOPTEMP_);
6957 	}
6958       *pc = clauses;
6959     }
6960 
6961   /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR.  */
6962   dlist = NULL;
6963   body = NULL;
6964   lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
6965 			   fdp);
6966   gimple_seq_add_seq (&body, gimple_omp_for_pre_body (stmt));
6967 
6968   lower_omp (gimple_omp_body_ptr (stmt), ctx);
6969 
6970   /* Lower the header expressions.  At this point, we can assume that
6971      the header is of the form:
6972 
6973      	#pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
6974 
6975      We just need to make sure that VAL1, VAL2 and VAL3 are lowered
6976      using the .omp_data_s mapping, if needed.  */
6977   for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
6978     {
6979       rhs_p = gimple_omp_for_initial_ptr (stmt, i);
6980       if (!is_gimple_min_invariant (*rhs_p))
6981 	*rhs_p = get_formal_tmp_var (*rhs_p, &body);
6982       else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
6983 	recompute_tree_invariant_for_addr_expr (*rhs_p);
6984 
6985       rhs_p = gimple_omp_for_final_ptr (stmt, i);
6986       if (!is_gimple_min_invariant (*rhs_p))
6987 	*rhs_p = get_formal_tmp_var (*rhs_p, &body);
6988       else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
6989 	recompute_tree_invariant_for_addr_expr (*rhs_p);
6990 
6991       rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
6992       if (!is_gimple_min_invariant (*rhs_p))
6993 	*rhs_p = get_formal_tmp_var (*rhs_p, &body);
6994     }
6995 
6996   /* Once lowered, extract the bounds and clauses.  */
6997   omp_extract_for_data (stmt, &fd, NULL);
6998 
6999   if (is_gimple_omp_oacc (ctx->stmt)
7000       && !ctx_in_oacc_kernels_region (ctx))
7001     lower_oacc_head_tail (gimple_location (stmt),
7002 			  gimple_omp_for_clauses (stmt),
7003 			  &oacc_head, &oacc_tail, ctx);
7004 
7005   /* Add OpenACC partitioning and reduction markers just before the loop.  */
7006   if (oacc_head)
7007     gimple_seq_add_seq (&body, oacc_head);
7008 
7009   lower_omp_for_lastprivate (&fd, &body, &dlist, ctx);
7010 
7011   if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
7012     for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
7013       if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7014 	  && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
7015 	{
7016 	  OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
7017 	  if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
7018 	    OMP_CLAUSE_LINEAR_STEP (c)
7019 	      = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
7020 						ctx);
7021 	}
7022 
7023   bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
7024 		     && gimple_omp_for_grid_phony (stmt));
7025   if (!phony_loop)
7026     gimple_seq_add_stmt (&body, stmt);
7027   gimple_seq_add_seq (&body, gimple_omp_body (stmt));
7028 
7029   if (!phony_loop)
7030     gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
7031 							   fd.loop.v));
7032 
7033   /* After the loop, add exit clauses.  */
7034   lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, ctx);
7035 
7036   if (ctx->cancellable)
7037     gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
7038 
7039   gimple_seq_add_seq (&body, dlist);
7040 
7041   body = maybe_catch_exception (body);
7042 
7043   if (!phony_loop)
7044     {
7045       /* Region exit marker goes at the end of the loop body.  */
7046       gimple_seq_add_stmt (&body, gimple_build_omp_return (fd.have_nowait));
7047       maybe_add_implicit_barrier_cancel (ctx, &body);
7048     }
7049 
7050   /* Add OpenACC joining and reduction markers just after the loop.  */
7051   if (oacc_tail)
7052     gimple_seq_add_seq (&body, oacc_tail);
7053 
7054   pop_gimplify_context (new_stmt);
7055 
7056   gimple_bind_append_vars (new_stmt, ctx->block_vars);
7057   maybe_remove_omp_member_access_dummy_vars (new_stmt);
7058   BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
7059   if (BLOCK_VARS (block))
7060     TREE_USED (block) = 1;
7061 
7062   gimple_bind_set_body (new_stmt, body);
7063   gimple_omp_set_body (stmt, NULL);
7064   gimple_omp_for_set_pre_body (stmt, NULL);
7065 }
7066 
7067 /* Callback for walk_stmts.  Check that the walked parallel body contains
7068    exactly one GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS and nothing else.  */
7069 
7070 static tree
7071 check_combined_parallel (gimple_stmt_iterator *gsi_p,
7072     			 bool *handled_ops_p,
7073     			 struct walk_stmt_info *wi)
7074 {
7075   int *info = (int *) wi->info;
7076   gimple *stmt = gsi_stmt (*gsi_p);
7077 
7078   *handled_ops_p = true;
7079   switch (gimple_code (stmt))
7080     {
7081     WALK_SUBSTMTS;
7082 
7083     case GIMPLE_OMP_FOR:
7084     case GIMPLE_OMP_SECTIONS:
7085       *info = *info == 0 ? 1 : -1;
7086       break;
7087     default:
7088       *info = -1;
7089       break;
7090     }
7091   return NULL;
7092 }
7093 
7094 struct omp_taskcopy_context
7095 {
7096   /* This field must be at the beginning, as we do "inheritance": Some
7097      callback functions for tree-inline.c (e.g., omp_copy_decl)
7098      receive a copy_body_data pointer that is up-casted to an
7099      omp_context pointer.  */
7100   copy_body_data cb;
7101   omp_context *ctx;
7102 };
7103 
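/* copy_body_data callback for the task copyfn: variables that have a
   field in the task's srecord get a fresh temporary in the copyfn,
   everything else is returned unchanged.  */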
7104 static tree
7105 task_copyfn_copy_decl (tree var, copy_body_data *cb)
7106 {
7107   struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
7108 
7109   if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
7110     return create_tmp_var (TREE_TYPE (var));
7111 
7112   return var;
7113 }
7114 
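/* Build a variant of the record type ORIG_TYPE with all variably
   modified field types remapped through TCCTX, so that VLA sizes and
   field offsets refer to the copyfn's own temporaries.  */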
7115 static tree
7116 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
7117 {
7118   tree name, new_fields = NULL, type, f;
7119 
7120   type = lang_hooks.types.make_type (RECORD_TYPE);
7121   name = DECL_NAME (TYPE_NAME (orig_type));
7122   name = build_decl (gimple_location (tcctx->ctx->stmt),
7123 		     TYPE_DECL, name, type);
7124   TYPE_NAME (type) = name;
7125 
7126   for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
7127     {
7128       tree new_f = copy_node (f);
7129       DECL_CONTEXT (new_f) = type;
7130       TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
7131       TREE_CHAIN (new_f) = new_fields;
7132       walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7133       walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7134       walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
7135 		 &tcctx->cb, NULL);
7136       new_fields = new_f;
7137       tcctx->cb.decl_map->put (f, new_f);
7138     }
7139   TYPE_FIELDS (type) = nreverse (new_fields);
7140   layout_type (type);
7141   return type;
7142 }
7143 
7144 /* Create task copyfn.  */
7145 
7146 static void
7147 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
7148 {
7149   struct function *child_cfun;
7150   tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
7151   tree record_type, srecord_type, bind, list;
7152   bool record_needs_remap = false, srecord_needs_remap = false;
7153   splay_tree_node n;
7154   struct omp_taskcopy_context tcctx;
7155   location_t loc = gimple_location (task_stmt);
7156   size_t looptempno = 0;
7157 
7158   child_fn = gimple_omp_task_copy_fn (task_stmt);
7159   child_cfun = DECL_STRUCT_FUNCTION (child_fn);
7160   gcc_assert (child_cfun->cfg == NULL);
7161   DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
7162 
7163   /* Reset DECL_CONTEXT on function arguments.  */
7164   for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
7165     DECL_CONTEXT (t) = child_fn;
7166 
7167   /* Populate the function.  */
7168   push_gimplify_context ();
7169   push_cfun (child_cfun);
7170 
7171   bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
7172   TREE_SIDE_EFFECTS (bind) = 1;
7173   list = NULL;
7174   DECL_SAVED_TREE (child_fn) = bind;
7175   DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
7176 
7177   /* Remap src and dst argument types if needed.  */
7178   record_type = ctx->record_type;
7179   srecord_type = ctx->srecord_type;
7180   for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
7181     if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7182       {
7183 	record_needs_remap = true;
7184 	break;
7185       }
7186   for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
7187     if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7188       {
7189 	srecord_needs_remap = true;
7190 	break;
7191       }
7192 
7193   if (record_needs_remap || srecord_needs_remap)
7194     {
7195       memset (&tcctx, '\0', sizeof (tcctx));
7196       tcctx.cb.src_fn = ctx->cb.src_fn;
7197       tcctx.cb.dst_fn = child_fn;
7198       tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
7199       gcc_checking_assert (tcctx.cb.src_node);
7200       tcctx.cb.dst_node = tcctx.cb.src_node;
7201       tcctx.cb.src_cfun = ctx->cb.src_cfun;
7202       tcctx.cb.copy_decl = task_copyfn_copy_decl;
7203       tcctx.cb.eh_lp_nr = 0;
7204       tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
7205       tcctx.cb.decl_map = new hash_map<tree, tree>;
7206       tcctx.ctx = ctx;
7207 
7208       if (record_needs_remap)
7209 	record_type = task_copyfn_remap_type (&tcctx, record_type);
7210       if (srecord_needs_remap)
7211 	srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
7212     }
7213   else
7214     tcctx.cb.decl_map = NULL;
7215 
7216   arg = DECL_ARGUMENTS (child_fn);
7217   TREE_TYPE (arg) = build_pointer_type (record_type);
7218   sarg = DECL_CHAIN (arg);
7219   TREE_TYPE (sarg) = build_pointer_type (srecord_type);
7220 
7221   /* First pass: initialize temporaries used in record_type and srecord_type
7222      sizes and field offsets.  */
7223   if (tcctx.cb.decl_map)
7224     for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7225       if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7226 	{
7227 	  tree *p;
7228 
7229 	  decl = OMP_CLAUSE_DECL (c);
7230 	  p = tcctx.cb.decl_map->get (decl);
7231 	  if (p == NULL)
7232 	    continue;
7233 	  n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7234 	  sf = (tree) n->value;
7235 	  sf = *tcctx.cb.decl_map->get (sf);
7236 	  src = build_simple_mem_ref_loc (loc, sarg);
7237 	  src = omp_build_component_ref (src, sf);
7238 	  t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
7239 	  append_to_statement_list (t, &list);
7240 	}
7241 
7242   /* Second pass: copy shared var pointers and copy-construct non-VLA
7243      firstprivate vars.  */
7244   for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7245     switch (OMP_CLAUSE_CODE (c))
7246       {
7247 	splay_tree_key key;
7248       case OMP_CLAUSE_SHARED:
7249 	decl = OMP_CLAUSE_DECL (c);
7250 	key = (splay_tree_key) decl;
7251 	if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7252 	  key = (splay_tree_key) &DECL_UID (decl);
7253 	n = splay_tree_lookup (ctx->field_map, key);
7254 	if (n == NULL)
7255 	  break;
7256 	f = (tree) n->value;
7257 	if (tcctx.cb.decl_map)
7258 	  f = *tcctx.cb.decl_map->get (f);
7259 	n = splay_tree_lookup (ctx->sfield_map, key);
7260 	sf = (tree) n->value;
7261 	if (tcctx.cb.decl_map)
7262 	  sf = *tcctx.cb.decl_map->get (sf);
7263 	src = build_simple_mem_ref_loc (loc, sarg);
7264 	src = omp_build_component_ref (src, sf);
7265 	dst = build_simple_mem_ref_loc (loc, arg);
7266 	dst = omp_build_component_ref (dst, f);
7267 	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7268 	append_to_statement_list (t, &list);
7269 	break;
7270       case OMP_CLAUSE__LOOPTEMP_:
7271 	/* Fields for the first two _looptemp_ clauses are initialized by
7272 	   GOMP_taskloop*, the rest are handled like firstprivate.  */
7273         if (looptempno < 2)
7274 	  {
7275 	    looptempno++;
7276 	    break;
7277 	  }
7278 	/* FALLTHRU */
7279       case OMP_CLAUSE_FIRSTPRIVATE:
7280 	decl = OMP_CLAUSE_DECL (c);
7281 	if (is_variable_sized (decl))
7282 	  break;
7283 	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7284 	if (n == NULL)
7285 	  break;
7286 	f = (tree) n->value;
7287 	if (tcctx.cb.decl_map)
7288 	  f = *tcctx.cb.decl_map->get (f);
7289 	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7290 	if (n != NULL)
7291 	  {
7292 	    sf = (tree) n->value;
7293 	    if (tcctx.cb.decl_map)
7294 	      sf = *tcctx.cb.decl_map->get (sf);
7295 	    src = build_simple_mem_ref_loc (loc, sarg);
7296 	    src = omp_build_component_ref (src, sf);
7297 	    if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
7298 	      src = build_simple_mem_ref_loc (loc, src);
7299 	  }
7300 	else
7301 	  src = decl;
7302 	dst = build_simple_mem_ref_loc (loc, arg);
7303 	dst = omp_build_component_ref (dst, f);
7304 	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__LOOPTEMP_)
7305 	  t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7306 	else
7307 	  t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7308 	append_to_statement_list (t, &list);
7309 	break;
7310       case OMP_CLAUSE_PRIVATE:
7311 	if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7312 	  break;
7313 	decl = OMP_CLAUSE_DECL (c);
7314 	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7315 	f = (tree) n->value;
7316 	if (tcctx.cb.decl_map)
7317 	  f = *tcctx.cb.decl_map->get (f);
7318 	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7319 	if (n != NULL)
7320 	  {
7321 	    sf = (tree) n->value;
7322 	    if (tcctx.cb.decl_map)
7323 	      sf = *tcctx.cb.decl_map->get (sf);
7324 	    src = build_simple_mem_ref_loc (loc, sarg);
7325 	    src = omp_build_component_ref (src, sf);
7326 	    if (use_pointer_for_field (decl, NULL))
7327 	      src = build_simple_mem_ref_loc (loc, src);
7328 	  }
7329 	else
7330 	  src = decl;
7331 	dst = build_simple_mem_ref_loc (loc, arg);
7332 	dst = omp_build_component_ref (dst, f);
7333 	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7334 	append_to_statement_list (t, &list);
7335 	break;
7336       default:
7337 	break;
7338       }
7339 
7340   /* Last pass: handle VLA firstprivates.  */
7341   if (tcctx.cb.decl_map)
7342     for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7343       if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7344 	{
7345 	  tree ind, ptr, df;
7346 
7347 	  decl = OMP_CLAUSE_DECL (c);
7348 	  if (!is_variable_sized (decl))
7349 	    continue;
7350 	  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7351 	  if (n == NULL)
7352 	    continue;
7353 	  f = (tree) n->value;
7354 	  f = *tcctx.cb.decl_map->get (f);
7355 	  gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
7356 	  ind = DECL_VALUE_EXPR (decl);
7357 	  gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
7358 	  gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
7359 	  n = splay_tree_lookup (ctx->sfield_map,
7360 				 (splay_tree_key) TREE_OPERAND (ind, 0));
7361 	  sf = (tree) n->value;
7362 	  sf = *tcctx.cb.decl_map->get (sf);
7363 	  src = build_simple_mem_ref_loc (loc, sarg);
7364 	  src = omp_build_component_ref (src, sf);
7365 	  src = build_simple_mem_ref_loc (loc, src);
7366 	  dst = build_simple_mem_ref_loc (loc, arg);
7367 	  dst = omp_build_component_ref (dst, f);
7368 	  t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7369 	  append_to_statement_list (t, &list);
7370 	  n = splay_tree_lookup (ctx->field_map,
7371 				 (splay_tree_key) TREE_OPERAND (ind, 0));
7372 	  df = (tree) n->value;
7373 	  df = *tcctx.cb.decl_map->get (df);
7374 	  ptr = build_simple_mem_ref_loc (loc, arg);
7375 	  ptr = omp_build_component_ref (ptr, df);
7376 	  t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
7377 		      build_fold_addr_expr_loc (loc, dst));
7378 	  append_to_statement_list (t, &list);
7379 	}
7380 
7381   t = build1 (RETURN_EXPR, void_type_node, NULL);
7382   append_to_statement_list (t, &list);
7383 
7384   if (tcctx.cb.decl_map)
7385     delete tcctx.cb.decl_map;
7386   pop_gimplify_context (NULL);
7387   BIND_EXPR_BODY (bind) = list;
7388   pop_cfun ();
7389 }
7390 
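/* Lower the depend clauses of a task or target directive into the flat
   array the runtime expects: element 0 is the total number of depend
   addresses, element 1 the number of out/inout ones (which come first),
   followed by the addresses themselves.  E.g. depend(out: a)
   depend(in: b, c) yields, roughly, { 3, 1, &a, &b, &c }.  */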
7391 static void
7392 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
7393 {
7394   tree c, clauses;
7395   gimple *g;
7396   size_t n_in = 0, n_out = 0, idx = 2, i;
7397 
7398   clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
7399   gcc_assert (clauses);
7400   for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7401     if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7402       switch (OMP_CLAUSE_DEPEND_KIND (c))
7403 	{
7404 	case OMP_CLAUSE_DEPEND_IN:
7405 	  n_in++;
7406 	  break;
7407 	case OMP_CLAUSE_DEPEND_OUT:
7408 	case OMP_CLAUSE_DEPEND_INOUT:
7409 	  n_out++;
7410 	  break;
7411 	case OMP_CLAUSE_DEPEND_SOURCE:
7412 	case OMP_CLAUSE_DEPEND_SINK:
7413 	  /* FALLTHRU */
7414 	default:
7415 	  gcc_unreachable ();
7416 	}
7417   tree type = build_array_type_nelts (ptr_type_node, n_in + n_out + 2);
7418   tree array = create_tmp_var (type);
7419   TREE_ADDRESSABLE (array) = 1;
7420   tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
7421 		   NULL_TREE);
7422   g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_in + n_out));
7423   gimple_seq_add_stmt (iseq, g);
7424   r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
7425 	      NULL_TREE);
7426   g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_out));
7427   gimple_seq_add_stmt (iseq, g);
7428   for (i = 0; i < 2; i++)
7429     {
7430       if ((i ? n_in : n_out) == 0)
7431 	continue;
7432       for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7433 	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
7434 	    && ((OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_IN) ^ i))
7435 	  {
7436 	    tree t = OMP_CLAUSE_DECL (c);
7437 	    t = fold_convert (ptr_type_node, t);
7438 	    gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
7439 	    r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
7440 			NULL_TREE, NULL_TREE);
7441 	    g = gimple_build_assign (r, t);
7442 	    gimple_seq_add_stmt (iseq, g);
7443 	  }
7444     }
7445   c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
7446   OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
7447   OMP_CLAUSE_CHAIN (c) = *pclauses;
7448   *pclauses = c;
7449   tree clobber = build_constructor (type, NULL);
7450   TREE_THIS_VOLATILE (clobber) = 1;
7451   g = gimple_build_assign (array, clobber);
7452   gimple_seq_add_stmt (oseq, g);
7453 }
7454 
7455 /* Lower the OpenMP parallel or task directive in the current statement
7456    in GSI_P.  CTX holds context information for the directive.  */
7457 
7458 static void
7459 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7460 {
7461   tree clauses;
7462   tree child_fn, t;
7463   gimple *stmt = gsi_stmt (*gsi_p);
7464   gbind *par_bind, *bind, *dep_bind = NULL;
7465   gimple_seq par_body, olist, ilist, par_olist, par_rlist, par_ilist, new_body;
7466   location_t loc = gimple_location (stmt);
7467 
7468   clauses = gimple_omp_taskreg_clauses (stmt);
7469   par_bind
7470     = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
7471   par_body = gimple_bind_body (par_bind);
7472   child_fn = ctx->cb.dst_fn;
7473   if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7474       && !gimple_omp_parallel_combined_p (stmt))
7475     {
7476       struct walk_stmt_info wi;
7477       int ws_num = 0;
7478 
7479       memset (&wi, 0, sizeof (wi));
7480       wi.info = &ws_num;
7481       wi.val_only = true;
7482       walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
7483       if (ws_num == 1)
7484 	gimple_omp_parallel_set_combined_p (stmt, true);
7485     }
7486   gimple_seq dep_ilist = NULL;
7487   gimple_seq dep_olist = NULL;
7488   if (gimple_code (stmt) == GIMPLE_OMP_TASK
7489       && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
7490     {
7491       push_gimplify_context ();
7492       dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7493       lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
7494 			    &dep_ilist, &dep_olist);
7495     }
7496 
7497   if (ctx->srecord_type)
7498     create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
7499 
7500   push_gimplify_context ();
7501 
7502   par_olist = NULL;
7503   par_ilist = NULL;
7504   par_rlist = NULL;
7505   bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7506     && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
7507   if (phony_construct && ctx->record_type)
7508     {
7509       gcc_checking_assert (!ctx->receiver_decl);
7510       ctx->receiver_decl = create_tmp_var
7511 	(build_reference_type (ctx->record_type), ".omp_rec");
7512     }
7513   lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
7514   lower_omp (&par_body, ctx);
7515   if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
7516     lower_reduction_clauses (clauses, &par_rlist, ctx);
7517 
7518   /* Declare all the variables created by mapping and the variables
7519      declared in the scope of the parallel body.  */
7520   record_vars_into (ctx->block_vars, child_fn);
7521   maybe_remove_omp_member_access_dummy_vars (par_bind);
7522   record_vars_into (gimple_bind_vars (par_bind), child_fn);
7523 
7524   if (ctx->record_type)
7525     {
7526       ctx->sender_decl
7527 	= create_tmp_var (ctx->srecord_type ? ctx->srecord_type
7528 			  : ctx->record_type, ".omp_data_o");
7529       DECL_NAMELESS (ctx->sender_decl) = 1;
7530       TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7531       gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
7532     }
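  /* A sketch of the data marshalling: each shared or copied-in variable
     has a field in .omp_data_o; the stores into those fields are emitted
     into ILIST below, and the child function reads the values back
     through the matching fields of its receiver (.omp_data_i).  */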
7533 
7534   olist = NULL;
7535   ilist = NULL;
7536   lower_send_clauses (clauses, &ilist, &olist, ctx);
7537   lower_send_shared_vars (&ilist, &olist, ctx);
7538 
7539   if (ctx->record_type)
7540     {
7541       tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
7542       TREE_THIS_VOLATILE (clobber) = 1;
7543       gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
7544 							clobber));
7545     }
7546 
7547   /* Once all the expansions are done, sequence all the different
7548      fragments inside gimple_omp_body.  */
7549 
7550   new_body = NULL;
7551 
7552   if (ctx->record_type)
7553     {
7554       t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
7555       /* fixup_child_record_type might have changed receiver_decl's type.  */
7556       t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
7557       gimple_seq_add_stmt (&new_body,
7558 	  		   gimple_build_assign (ctx->receiver_decl, t));
7559     }
7560 
7561   gimple_seq_add_seq (&new_body, par_ilist);
7562   gimple_seq_add_seq (&new_body, par_body);
7563   gimple_seq_add_seq (&new_body, par_rlist);
7564   if (ctx->cancellable)
7565     gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
7566   gimple_seq_add_seq (&new_body, par_olist);
7567   new_body = maybe_catch_exception (new_body);
7568   if (gimple_code (stmt) == GIMPLE_OMP_TASK)
7569     gimple_seq_add_stmt (&new_body,
7570 			 gimple_build_omp_continue (integer_zero_node,
7571 						    integer_zero_node));
7572   if (!phony_construct)
7573     {
7574       gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
7575       gimple_omp_set_body (stmt, new_body);
7576     }
7577 
7578   bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
7579   gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
7580   gimple_bind_add_seq (bind, ilist);
7581   if (!phony_construct)
7582     gimple_bind_add_stmt (bind, stmt);
7583   else
7584     gimple_bind_add_seq (bind, new_body);
7585   gimple_bind_add_seq (bind, olist);
7586 
7587   pop_gimplify_context (NULL);
7588 
7589   if (dep_bind)
7590     {
7591       gimple_bind_add_seq (dep_bind, dep_ilist);
7592       gimple_bind_add_stmt (dep_bind, bind);
7593       gimple_bind_add_seq (dep_bind, dep_olist);
7594       pop_gimplify_context (dep_bind);
7595     }
7596 }
7597 
7598 /* Lower the GIMPLE_OMP_TARGET in the current statement
7599    in GSI_P.  CTX holds context information for the directive.  */
7600 
7601 static void
7602 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7603 {
7604   tree clauses;
7605   tree child_fn, t, c;
7606   gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
7607   gbind *tgt_bind, *bind, *dep_bind = NULL;
7608   gimple_seq tgt_body, olist, ilist, fplist, new_body;
7609   location_t loc = gimple_location (stmt);
7610   bool offloaded, data_region;
7611   unsigned int map_cnt = 0;
7612 
7613   offloaded = is_gimple_omp_offloaded (stmt);
7614   switch (gimple_omp_target_kind (stmt))
7615     {
7616     case GF_OMP_TARGET_KIND_REGION:
7617     case GF_OMP_TARGET_KIND_UPDATE:
7618     case GF_OMP_TARGET_KIND_ENTER_DATA:
7619     case GF_OMP_TARGET_KIND_EXIT_DATA:
7620     case GF_OMP_TARGET_KIND_OACC_PARALLEL:
7621     case GF_OMP_TARGET_KIND_OACC_KERNELS:
7622     case GF_OMP_TARGET_KIND_OACC_UPDATE:
7623     case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
7624     case GF_OMP_TARGET_KIND_OACC_DECLARE:
7625       data_region = false;
7626       break;
7627     case GF_OMP_TARGET_KIND_DATA:
7628     case GF_OMP_TARGET_KIND_OACC_DATA:
7629     case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
7630       data_region = true;
7631       break;
7632     default:
7633       gcc_unreachable ();
7634     }
7635 
7636   clauses = gimple_omp_target_clauses (stmt);
7637 
7638   gimple_seq dep_ilist = NULL;
7639   gimple_seq dep_olist = NULL;
7640   if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
7641     {
7642       push_gimplify_context ();
7643       dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7644       lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
7645 			    &dep_ilist, &dep_olist);
7646     }
7647 
7648   tgt_bind = NULL;
7649   tgt_body = NULL;
7650   if (offloaded)
7651     {
7652       tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
7653       tgt_body = gimple_bind_body (tgt_bind);
7654     }
7655   else if (data_region)
7656     tgt_body = gimple_omp_body (stmt);
7657   child_fn = ctx->cb.dst_fn;
7658 
7659   push_gimplify_context ();
7660   fplist = NULL;
7661 
7662   for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7663     switch (OMP_CLAUSE_CODE (c))
7664       {
7665 	tree var, x;
7666 
7667       default:
7668 	break;
7669       case OMP_CLAUSE_MAP:
7670 #if CHECKING_P
7671 	/* First check what we're prepared to handle in the following.  */
7672 	switch (OMP_CLAUSE_MAP_KIND (c))
7673 	  {
7674 	  case GOMP_MAP_ALLOC:
7675 	  case GOMP_MAP_TO:
7676 	  case GOMP_MAP_FROM:
7677 	  case GOMP_MAP_TOFROM:
7678 	  case GOMP_MAP_POINTER:
7679 	  case GOMP_MAP_TO_PSET:
7680 	  case GOMP_MAP_DELETE:
7681 	  case GOMP_MAP_RELEASE:
7682 	  case GOMP_MAP_ALWAYS_TO:
7683 	  case GOMP_MAP_ALWAYS_FROM:
7684 	  case GOMP_MAP_ALWAYS_TOFROM:
7685 	  case GOMP_MAP_FIRSTPRIVATE_POINTER:
7686 	  case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
7687 	  case GOMP_MAP_STRUCT:
7688 	  case GOMP_MAP_ALWAYS_POINTER:
7689 	    break;
7690 	  case GOMP_MAP_FORCE_ALLOC:
7691 	  case GOMP_MAP_FORCE_TO:
7692 	  case GOMP_MAP_FORCE_FROM:
7693 	  case GOMP_MAP_FORCE_TOFROM:
7694 	  case GOMP_MAP_FORCE_PRESENT:
7695 	  case GOMP_MAP_FORCE_DEVICEPTR:
7696 	  case GOMP_MAP_DEVICE_RESIDENT:
7697 	  case GOMP_MAP_LINK:
7698 	    gcc_assert (is_gimple_omp_oacc (stmt));
7699 	    break;
7700 	  default:
7701 	    gcc_unreachable ();
7702 	  }
7703 #endif
7704 	  /* FALLTHRU */
7705       case OMP_CLAUSE_TO:
7706       case OMP_CLAUSE_FROM:
7707       oacc_firstprivate:
7708 	var = OMP_CLAUSE_DECL (c);
7709 	if (!DECL_P (var))
7710 	  {
7711 	    if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
7712 		|| (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7713 		    && (OMP_CLAUSE_MAP_KIND (c)
7714 			!= GOMP_MAP_FIRSTPRIVATE_POINTER)))
7715 	      map_cnt++;
7716 	    continue;
7717 	  }
7718 
7719 	if (DECL_SIZE (var)
7720 	    && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
7721 	  {
7722 	    tree var2 = DECL_VALUE_EXPR (var);
7723 	    gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
7724 	    var2 = TREE_OPERAND (var2, 0);
7725 	    gcc_assert (DECL_P (var2));
7726 	    var = var2;
7727 	  }
7728 
7729 	if (offloaded
7730 	    && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7731 	    && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7732 		|| OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7733 	  {
7734 	    if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7735 	      {
7736 		if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
7737 		    && varpool_node::get_create (var)->offloadable)
7738 		  continue;
7739 
7740 		tree type = build_pointer_type (TREE_TYPE (var));
7741 		tree new_var = lookup_decl (var, ctx);
7742 		x = create_tmp_var_raw (type, get_name (new_var));
7743 		gimple_add_tmp_var (x);
7744 		x = build_simple_mem_ref (x);
7745 		SET_DECL_VALUE_EXPR (new_var, x);
7746 		DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7747 	      }
7748 	    continue;
7749 	  }
7750 
7751 	if (!maybe_lookup_field (var, ctx))
7752 	  continue;
7753 
7754 	/* Don't remap oacc parallel reduction variables, because the
7755 	   intermediate result must be local to each gang.  */
7756 	if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7757 			   && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
7758 	  {
7759 	    x = build_receiver_ref (var, true, ctx);
7760 	    tree new_var = lookup_decl (var, ctx);
7761 
7762 	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7763 		&& OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7764 		&& !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7765 		&& TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7766 	      x = build_simple_mem_ref (x);
7767 	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7768 	      {
7769 		gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7770 		if (omp_is_reference (new_var))
7771 		  {
7772 		    /* Create a local object to hold the instance
7773 		       value.  */
7774 		    tree type = TREE_TYPE (TREE_TYPE (new_var));
7775 		    const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
7776 		    tree inst = create_tmp_var (type, id);
7777 		    gimplify_assign (inst, fold_indirect_ref (x), &fplist);
7778 		    x = build_fold_addr_expr (inst);
7779 		  }
7780 		gimplify_assign (new_var, x, &fplist);
7781 	      }
7782 	    else if (DECL_P (new_var))
7783 	      {
7784 		SET_DECL_VALUE_EXPR (new_var, x);
7785 		DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7786 	      }
7787 	    else
7788 	      gcc_unreachable ();
7789 	  }
7790 	map_cnt++;
7791 	break;
7792 
7793       case OMP_CLAUSE_FIRSTPRIVATE:
7794 	if (is_oacc_parallel (ctx))
7795 	  goto oacc_firstprivate;
7796 	map_cnt++;
7797 	var = OMP_CLAUSE_DECL (c);
7798 	if (!omp_is_reference (var)
7799 	    && !is_gimple_reg_type (TREE_TYPE (var)))
7800 	  {
7801 	    tree new_var = lookup_decl (var, ctx);
7802 	    if (is_variable_sized (var))
7803 	      {
7804 		tree pvar = DECL_VALUE_EXPR (var);
7805 		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7806 		pvar = TREE_OPERAND (pvar, 0);
7807 		gcc_assert (DECL_P (pvar));
7808 		tree new_pvar = lookup_decl (pvar, ctx);
7809 		x = build_fold_indirect_ref (new_pvar);
7810 		TREE_THIS_NOTRAP (x) = 1;
7811 	      }
7812 	    else
7813 	      x = build_receiver_ref (var, true, ctx);
7814 	    SET_DECL_VALUE_EXPR (new_var, x);
7815 	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7816 	  }
7817 	break;
7818 
7819       case OMP_CLAUSE_PRIVATE:
7820 	if (is_gimple_omp_oacc (ctx->stmt))
7821 	  break;
7822 	var = OMP_CLAUSE_DECL (c);
7823 	if (is_variable_sized (var))
7824 	  {
7825 	    tree new_var = lookup_decl (var, ctx);
7826 	    tree pvar = DECL_VALUE_EXPR (var);
7827 	    gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7828 	    pvar = TREE_OPERAND (pvar, 0);
7829 	    gcc_assert (DECL_P (pvar));
7830 	    tree new_pvar = lookup_decl (pvar, ctx);
7831 	    x = build_fold_indirect_ref (new_pvar);
7832 	    TREE_THIS_NOTRAP (x) = 1;
7833 	    SET_DECL_VALUE_EXPR (new_var, x);
7834 	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7835 	  }
7836 	break;
7837 
7838       case OMP_CLAUSE_USE_DEVICE_PTR:
7839       case OMP_CLAUSE_IS_DEVICE_PTR:
7840 	var = OMP_CLAUSE_DECL (c);
7841 	map_cnt++;
7842 	if (is_variable_sized (var))
7843 	  {
7844 	    tree new_var = lookup_decl (var, ctx);
7845 	    tree pvar = DECL_VALUE_EXPR (var);
7846 	    gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7847 	    pvar = TREE_OPERAND (pvar, 0);
7848 	    gcc_assert (DECL_P (pvar));
7849 	    tree new_pvar = lookup_decl (pvar, ctx);
7850 	    x = build_fold_indirect_ref (new_pvar);
7851 	    TREE_THIS_NOTRAP (x) = 1;
7852 	    SET_DECL_VALUE_EXPR (new_var, x);
7853 	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7854 	  }
7855 	else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7856 	  {
7857 	    tree new_var = lookup_decl (var, ctx);
7858 	    tree type = build_pointer_type (TREE_TYPE (var));
7859 	    x = create_tmp_var_raw (type, get_name (new_var));
7860 	    gimple_add_tmp_var (x);
7861 	    x = build_simple_mem_ref (x);
7862 	    SET_DECL_VALUE_EXPR (new_var, x);
7863 	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7864 	  }
7865 	else
7866 	  {
7867 	    tree new_var = lookup_decl (var, ctx);
7868 	    x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
7869 	    gimple_add_tmp_var (x);
7870 	    SET_DECL_VALUE_EXPR (new_var, x);
7871 	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7872 	  }
7873 	break;
7874       }
7875 
7876   if (offloaded)
7877     {
7878       target_nesting_level++;
7879       lower_omp (&tgt_body, ctx);
7880       target_nesting_level--;
7881     }
7882   else if (data_region)
7883     lower_omp (&tgt_body, ctx);
7884 
7885   if (offloaded)
7886     {
7887       /* Declare all the variables created by mapping and the variables
7888 	 declared in the scope of the target body.  */
7889       record_vars_into (ctx->block_vars, child_fn);
7890       maybe_remove_omp_member_access_dummy_vars (tgt_bind);
7891       record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
7892     }
7893 
7894   olist = NULL;
7895   ilist = NULL;
7896   if (ctx->record_type)
7897     {
7898       ctx->sender_decl
7899 	= create_tmp_var (ctx->record_type, ".omp_data_arr");
7900       DECL_NAMELESS (ctx->sender_decl) = 1;
7901       TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7902       t = make_tree_vec (3);
7903       TREE_VEC_ELT (t, 0) = ctx->sender_decl;
7904       TREE_VEC_ELT (t, 1)
7905 	= create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
7906 			  ".omp_data_sizes");
7907       DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
7908       TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
7909       TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
7910       tree tkind_type = short_unsigned_type_node;
7911       int talign_shift = 8;
7912       TREE_VEC_ELT (t, 2)
7913 	= create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
7914 			  ".omp_data_kinds");
7915       DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
7916       TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
7917       TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
7918       gimple_omp_target_set_data_arg (stmt, t);
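      /* A sketch of the interface: .omp_data_arr holds the host address
	 of each mapping, .omp_data_sizes its size in bytes, and
	 .omp_data_kinds its encoded map kind and alignment; the expansion
	 pass later hands all three arrays to the target runtime entry
	 points (GOMP_target_ext and friends).  */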
7919 
7920       vec<constructor_elt, va_gc> *vsize;
7921       vec<constructor_elt, va_gc> *vkind;
7922       vec_alloc (vsize, map_cnt);
7923       vec_alloc (vkind, map_cnt);
7924       unsigned int map_idx = 0;
7925 
7926       for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7927 	switch (OMP_CLAUSE_CODE (c))
7928 	  {
7929 	    tree ovar, nc, s, purpose, var, x, type;
7930 	    unsigned int talign;
7931 
7932 	  default:
7933 	    break;
7934 
7935 	  case OMP_CLAUSE_MAP:
7936 	  case OMP_CLAUSE_TO:
7937 	  case OMP_CLAUSE_FROM:
7938 	  oacc_firstprivate_map:
7939 	    nc = c;
7940 	    ovar = OMP_CLAUSE_DECL (c);
7941 	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7942 		&& (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7943 		    || (OMP_CLAUSE_MAP_KIND (c)
7944 			== GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
7945 	      break;
7946 	    if (!DECL_P (ovar))
7947 	      {
7948 		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7949 		    && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
7950 		  {
7951 		    gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
7952 					 == get_base_address (ovar));
7953 		    nc = OMP_CLAUSE_CHAIN (c);
7954 		    ovar = OMP_CLAUSE_DECL (nc);
7955 		  }
7956 		else
7957 		  {
7958 		    tree x = build_sender_ref (ovar, ctx);
7959 		    tree v
7960 		      = build_fold_addr_expr_with_type (ovar, ptr_type_node);
7961 		    gimplify_assign (x, v, &ilist);
7962 		    nc = NULL_TREE;
7963 		  }
7964 	      }
7965 	    else
7966 	      {
7967 		if (DECL_SIZE (ovar)
7968 		    && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
7969 		  {
7970 		    tree ovar2 = DECL_VALUE_EXPR (ovar);
7971 		    gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
7972 		    ovar2 = TREE_OPERAND (ovar2, 0);
7973 		    gcc_assert (DECL_P (ovar2));
7974 		    ovar = ovar2;
7975 		  }
7976 		if (!maybe_lookup_field (ovar, ctx))
7977 		  continue;
7978 	      }
7979 
7980 	    talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
7981 	    if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
7982 	      talign = DECL_ALIGN_UNIT (ovar);
7983 	    if (nc)
7984 	      {
7985 		var = lookup_decl_in_outer_ctx (ovar, ctx);
7986 		x = build_sender_ref (ovar, ctx);
7987 
7988 		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7989 		    && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7990 		    && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7991 		    && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
7992 		  {
7993 		    gcc_assert (offloaded);
7994 		    tree avar
7995 		      = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
7996 		    mark_addressable (avar);
7997 		    gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
7998 		    talign = DECL_ALIGN_UNIT (avar);
7999 		    avar = build_fold_addr_expr (avar);
8000 		    gimplify_assign (x, avar, &ilist);
8001 		  }
8002 		else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
8003 		  {
8004 		    gcc_assert (is_gimple_omp_oacc (ctx->stmt));
8005 		    if (!omp_is_reference (var))
8006 		      {
8007 			if (is_gimple_reg (var)
8008 			    && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8009 			  TREE_NO_WARNING (var) = 1;
8010 			var = build_fold_addr_expr (var);
8011 		      }
8012 		    else
8013 		      talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8014 		    gimplify_assign (x, var, &ilist);
8015 		  }
8016 		else if (is_gimple_reg (var))
8017 		  {
8018 		    gcc_assert (offloaded);
8019 		    tree avar = create_tmp_var (TREE_TYPE (var));
8020 		    mark_addressable (avar);
8021 		    enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
8022 		    if (GOMP_MAP_COPY_TO_P (map_kind)
8023 			|| map_kind == GOMP_MAP_POINTER
8024 			|| map_kind == GOMP_MAP_TO_PSET
8025 			|| map_kind == GOMP_MAP_FORCE_DEVICEPTR)
8026 		      {
8027 			/* If we need to initialize a temporary
8028 			   with VAR because it is not addressable, and
8029 			   the variable hasn't been initialized yet, then
8030 			   we'll get a warning for the store to avar.
8031 			   Don't warn in that case, the mapping might
8032 			   be implicit.  */
8033 			TREE_NO_WARNING (var) = 1;
8034 			gimplify_assign (avar, var, &ilist);
8035 		      }
8036 		    avar = build_fold_addr_expr (avar);
8037 		    gimplify_assign (x, avar, &ilist);
8038 		    if ((GOMP_MAP_COPY_FROM_P (map_kind)
8039 			 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
8040 			&& !TYPE_READONLY (TREE_TYPE (var)))
8041 		      {
8042 			x = unshare_expr (x);
8043 			x = build_simple_mem_ref (x);
8044 			gimplify_assign (var, x, &olist);
8045 		      }
8046 		  }
8047 		else
8048 		  {
8049 		    var = build_fold_addr_expr (var);
8050 		    gimplify_assign (x, var, &ilist);
8051 		  }
8052 	      }
8053 	    s = NULL_TREE;
8054 	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
8055 	      {
8056 		gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
8057 		s = TREE_TYPE (ovar);
8058 		if (TREE_CODE (s) == REFERENCE_TYPE)
8059 		  s = TREE_TYPE (s);
8060 		s = TYPE_SIZE_UNIT (s);
8061 	      }
8062 	    else
8063 	      s = OMP_CLAUSE_SIZE (c);
8064 	    if (s == NULL_TREE)
8065 	      s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
8066 	    s = fold_convert (size_type_node, s);
8067 	    purpose = size_int (map_idx++);
8068 	    CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8069 	    if (TREE_CODE (s) != INTEGER_CST)
8070 	      TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
8071 
8072 	    unsigned HOST_WIDE_INT tkind, tkind_zero;
8073 	    switch (OMP_CLAUSE_CODE (c))
8074 	      {
8075 	      case OMP_CLAUSE_MAP:
8076 		tkind = OMP_CLAUSE_MAP_KIND (c);
8077 		tkind_zero = tkind;
8078 		if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
8079 		  switch (tkind)
8080 		    {
8081 		    case GOMP_MAP_ALLOC:
8082 		    case GOMP_MAP_TO:
8083 		    case GOMP_MAP_FROM:
8084 		    case GOMP_MAP_TOFROM:
8085 		    case GOMP_MAP_ALWAYS_TO:
8086 		    case GOMP_MAP_ALWAYS_FROM:
8087 		    case GOMP_MAP_ALWAYS_TOFROM:
8088 		    case GOMP_MAP_RELEASE:
8089 		    case GOMP_MAP_FORCE_TO:
8090 		    case GOMP_MAP_FORCE_FROM:
8091 		    case GOMP_MAP_FORCE_TOFROM:
8092 		    case GOMP_MAP_FORCE_PRESENT:
8093 		      tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
8094 		      break;
8095 		    case GOMP_MAP_DELETE:
8096 		      tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
		      /* FALLTHRU */
8097 		    default:
8098 		      break;
8099 		    }
8100 		if (tkind_zero != tkind)
8101 		  {
8102 		    if (integer_zerop (s))
8103 		      tkind = tkind_zero;
8104 		    else if (integer_nonzerop (s))
8105 		      tkind_zero = tkind;
8106 		  }
8107 		break;
8108 	      case OMP_CLAUSE_FIRSTPRIVATE:
8109 		gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
8110 		tkind = GOMP_MAP_TO;
8111 		tkind_zero = tkind;
8112 		break;
8113 	      case OMP_CLAUSE_TO:
8114 		tkind = GOMP_MAP_TO;
8115 		tkind_zero = tkind;
8116 		break;
8117 	      case OMP_CLAUSE_FROM:
8118 		tkind = GOMP_MAP_FROM;
8119 		tkind_zero = tkind;
8120 		break;
8121 	      default:
8122 		gcc_unreachable ();
8123 	      }
8124 	    gcc_checking_assert (tkind
8125 				 < (HOST_WIDE_INT_C (1U) << talign_shift));
8126 	    gcc_checking_assert (tkind_zero
8127 				 < (HOST_WIDE_INT_C (1U) << talign_shift));
8128 	    talign = ceil_log2 (talign);
8129 	    tkind |= talign << talign_shift;
8130 	    tkind_zero |= talign << talign_shift;
8131 	    gcc_checking_assert (tkind
8132 				 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8133 	    gcc_checking_assert (tkind_zero
8134 				 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8135 	    if (tkind == tkind_zero)
8136 	      x = build_int_cstu (tkind_type, tkind);
8137 	    else
8138 	      {
8139 		TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
8140 		x = build3 (COND_EXPR, tkind_type,
8141 			    fold_build2 (EQ_EXPR, boolean_type_node,
8142 					 unshare_expr (s), size_zero_node),
8143 			    build_int_cstu (tkind_type, tkind_zero),
8144 			    build_int_cstu (tkind_type, tkind));
8145 	      }
8146 	    CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
8147 	    if (nc && nc != c)
8148 	      c = nc;
8149 	    break;
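
	    /* Illustration (a sketch, not verbatim omplower output):
	       for a mapping whose length may be zero at run time, such
	       as "map(tofrom: a[i:len])", the kinds entry built above
	       is not a constant but roughly

		 size == 0 ? GOMP_MAP_ZERO_LEN_ARRAY_SECTION | align_bits
			   : GOMP_MAP_TOFROM | align_bits

	       (ALIGN_BITS standing for ceil_log2 (talign) << talign_shift),
	       materialized as the COND_EXPR above; the kinds array also
	       loses TREE_STATIC because it now needs runtime
	       initialization.  */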
8150 
8151 	  case OMP_CLAUSE_FIRSTPRIVATE:
8152 	    if (is_oacc_parallel (ctx))
8153 	      goto oacc_firstprivate_map;
8154 	    ovar = OMP_CLAUSE_DECL (c);
8155 	    if (omp_is_reference (ovar))
8156 	      talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8157 	    else
8158 	      talign = DECL_ALIGN_UNIT (ovar);
8159 	    var = lookup_decl_in_outer_ctx (ovar, ctx);
8160 	    x = build_sender_ref (ovar, ctx);
8161 	    tkind = GOMP_MAP_FIRSTPRIVATE;
8162 	    type = TREE_TYPE (ovar);
8163 	    if (omp_is_reference (ovar))
8164 	      type = TREE_TYPE (type);
8165 	    if ((INTEGRAL_TYPE_P (type)
8166 		 && TYPE_PRECISION (type) <= POINTER_SIZE)
8167 		|| TREE_CODE (type) == POINTER_TYPE)
8168 	      {
8169 		tkind = GOMP_MAP_FIRSTPRIVATE_INT;
8170 		tree t = var;
8171 		if (omp_is_reference (var))
8172 		  t = build_simple_mem_ref (var);
8173 		else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8174 		  TREE_NO_WARNING (var) = 1;
8175 		if (TREE_CODE (type) != POINTER_TYPE)
8176 		  t = fold_convert (pointer_sized_int_node, t);
8177 		t = fold_convert (TREE_TYPE (x), t);
8178 		gimplify_assign (x, t, &ilist);
8179 	      }
8180 	    else if (omp_is_reference (var))
8181 	      gimplify_assign (x, var, &ilist);
8182 	    else if (is_gimple_reg (var))
8183 	      {
8184 		tree avar = create_tmp_var (TREE_TYPE (var));
8185 		mark_addressable (avar);
8186 		if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8187 		  TREE_NO_WARNING (var) = 1;
8188 		gimplify_assign (avar, var, &ilist);
8189 		avar = build_fold_addr_expr (avar);
8190 		gimplify_assign (x, avar, &ilist);
8191 	      }
8192 	    else
8193 	      {
8194 		var = build_fold_addr_expr (var);
8195 		gimplify_assign (x, var, &ilist);
8196 	      }
8197 	    if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
8198 	      s = size_int (0);
8199 	    else if (omp_is_reference (ovar))
8200 	      s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8201 	    else
8202 	      s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
8203 	    s = fold_convert (size_type_node, s);
8204 	    purpose = size_int (map_idx++);
8205 	    CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8206 	    if (TREE_CODE (s) != INTEGER_CST)
8207 	      TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
8208 
8209 	    gcc_checking_assert (tkind
8210 				 < (HOST_WIDE_INT_C (1U) << talign_shift));
8211 	    talign = ceil_log2 (talign);
8212 	    tkind |= talign << talign_shift;
8213 	    gcc_checking_assert (tkind
8214 				 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8215 	    CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8216 				    build_int_cstu (tkind_type, tkind));
8217 	    break;
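
	    /* For illustration (a sketch): given

		 int n = 32;
		 #pragma omp target firstprivate (n)
		   ;

	       N's type fits in a pointer, so the code above stores the
	       value itself into the sender slot, converted through
	       pointer_sized_int_node, and records size 0 with kind
	       GOMP_MAP_FIRSTPRIVATE_INT; the runtime then passes the
	       slot by value instead of mapping any storage.  */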
8218 
8219 	  case OMP_CLAUSE_USE_DEVICE_PTR:
8220 	  case OMP_CLAUSE_IS_DEVICE_PTR:
8221 	    ovar = OMP_CLAUSE_DECL (c);
8222 	    var = lookup_decl_in_outer_ctx (ovar, ctx);
8223 	    x = build_sender_ref (ovar, ctx);
8224 	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8225 	      tkind = GOMP_MAP_USE_DEVICE_PTR;
8226 	    else
8227 	      tkind = GOMP_MAP_FIRSTPRIVATE_INT;
8228 	    type = TREE_TYPE (ovar);
8229 	    if (TREE_CODE (type) == ARRAY_TYPE)
8230 	      var = build_fold_addr_expr (var);
8231 	    else
8232 	      {
8233 		if (omp_is_reference (ovar))
8234 		  {
8235 		    type = TREE_TYPE (type);
8236 		    if (TREE_CODE (type) != ARRAY_TYPE)
8237 		      var = build_simple_mem_ref (var);
8238 		    var = fold_convert (TREE_TYPE (x), var);
8239 		  }
8240 	      }
8241 	    gimplify_assign (x, var, &ilist);
8242 	    s = size_int (0);
8243 	    purpose = size_int (map_idx++);
8244 	    CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8245 	    gcc_checking_assert (tkind
8246 				 < (HOST_WIDE_INT_C (1U) << talign_shift));
8247 	    gcc_checking_assert (tkind
8248 				 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8249 	    CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8250 				    build_int_cstu (tkind_type, tkind));
8251 	    break;
8252 	  }
8253 
8254       gcc_assert (map_idx == map_cnt);
8255 
8256       DECL_INITIAL (TREE_VEC_ELT (t, 1))
8257 	= build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
8258       DECL_INITIAL (TREE_VEC_ELT (t, 2))
8259 	= build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
8260       for (int i = 1; i <= 2; i++)
8261 	if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
8262 	  {
8263 	    gimple_seq initlist = NULL;
8264 	    force_gimple_operand (build1 (DECL_EXPR, void_type_node,
8265 					  TREE_VEC_ELT (t, i)),
8266 				  &initlist, true, NULL_TREE);
8267 	    gimple_seq_add_seq (&ilist, initlist);
8268 
8269 	    tree clobber = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, i)),
8270 					      NULL);
8271 	    TREE_THIS_VOLATILE (clobber) = 1;
8272 	    gimple_seq_add_stmt (&olist,
8273 				 gimple_build_assign (TREE_VEC_ELT (t, i),
8274 						      clobber));
8275 	  }
8276 
8277       tree clobber = build_constructor (ctx->record_type, NULL);
8278       TREE_THIS_VOLATILE (clobber) = 1;
8279       gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
8280 							clobber));
8281     }
8282 
8283   /* Once all the expansions are done, sequence all the different
8284      fragments inside gimple_omp_body.  */
8285 
8286   new_body = NULL;
8287 
8288   if (offloaded
8289       && ctx->record_type)
8290     {
8291       t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
8292       /* fixup_child_record_type might have changed receiver_decl's type.  */
8293       t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
8294       gimple_seq_add_stmt (&new_body,
8295 			   gimple_build_assign (ctx->receiver_decl, t));
8296     }
8297   gimple_seq_add_seq (&new_body, fplist);
8298 
8299   if (offloaded || data_region)
8300     {
8301       tree prev = NULL_TREE;
8302       for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
8303 	switch (OMP_CLAUSE_CODE (c))
8304 	  {
8305 	    tree var, x;
8306 	  default:
8307 	    break;
8308 	  case OMP_CLAUSE_FIRSTPRIVATE:
8309 	    if (is_gimple_omp_oacc (ctx->stmt))
8310 	      break;
8311 	    var = OMP_CLAUSE_DECL (c);
8312 	    if (omp_is_reference (var)
8313 		|| is_gimple_reg_type (TREE_TYPE (var)))
8314 	      {
8315 		tree new_var = lookup_decl (var, ctx);
8316 		tree type;
8317 		type = TREE_TYPE (var);
8318 		if (omp_is_reference (var))
8319 		  type = TREE_TYPE (type);
8320 		if ((INTEGRAL_TYPE_P (type)
8321 		     && TYPE_PRECISION (type) <= POINTER_SIZE)
8322 		    || TREE_CODE (type) == POINTER_TYPE)
8323 		  {
8324 		    x = build_receiver_ref (var, false, ctx);
8325 		    if (TREE_CODE (type) != POINTER_TYPE)
8326 		      x = fold_convert (pointer_sized_int_node, x);
8327 		    x = fold_convert (type, x);
8328 		    gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8329 				   fb_rvalue);
8330 		    if (omp_is_reference (var))
8331 		      {
8332 			tree v = create_tmp_var_raw (type, get_name (var));
8333 			gimple_add_tmp_var (v);
8334 			TREE_ADDRESSABLE (v) = 1;
8335 			gimple_seq_add_stmt (&new_body,
8336 					     gimple_build_assign (v, x));
8337 			x = build_fold_addr_expr (v);
8338 		      }
8339 		    gimple_seq_add_stmt (&new_body,
8340 					 gimple_build_assign (new_var, x));
8341 		  }
8342 		else
8343 		  {
8344 		    x = build_receiver_ref (var, !omp_is_reference (var), ctx);
8345 		    gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8346 				   fb_rvalue);
8347 		    gimple_seq_add_stmt (&new_body,
8348 					 gimple_build_assign (new_var, x));
8349 		  }
8350 	      }
8351 	    else if (is_variable_sized (var))
8352 	      {
8353 		tree pvar = DECL_VALUE_EXPR (var);
8354 		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8355 		pvar = TREE_OPERAND (pvar, 0);
8356 		gcc_assert (DECL_P (pvar));
8357 		tree new_var = lookup_decl (pvar, ctx);
8358 		x = build_receiver_ref (var, false, ctx);
8359 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8360 		gimple_seq_add_stmt (&new_body,
8361 				     gimple_build_assign (new_var, x));
8362 	      }
8363 	    break;
8364 	  case OMP_CLAUSE_PRIVATE:
8365 	    if (is_gimple_omp_oacc (ctx->stmt))
8366 	      break;
8367 	    var = OMP_CLAUSE_DECL (c);
8368 	    if (omp_is_reference (var))
8369 	      {
8370 		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8371 		tree new_var = lookup_decl (var, ctx);
8372 		x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8373 		if (TREE_CONSTANT (x))
8374 		  {
8375 		    x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
8376 					    get_name (var));
8377 		    gimple_add_tmp_var (x);
8378 		    TREE_ADDRESSABLE (x) = 1;
8379 		    x = build_fold_addr_expr_loc (clause_loc, x);
8380 		  }
8381 		else
8382 		  break;
8383 
8384 		x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8385 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8386 		gimple_seq_add_stmt (&new_body,
8387 				     gimple_build_assign (new_var, x));
8388 	      }
8389 	    break;
8390 	  case OMP_CLAUSE_USE_DEVICE_PTR:
8391 	  case OMP_CLAUSE_IS_DEVICE_PTR:
8392 	    var = OMP_CLAUSE_DECL (c);
8393 	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8394 	      x = build_sender_ref (var, ctx);
8395 	    else
8396 	      x = build_receiver_ref (var, false, ctx);
8397 	    if (is_variable_sized (var))
8398 	      {
8399 		tree pvar = DECL_VALUE_EXPR (var);
8400 		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8401 		pvar = TREE_OPERAND (pvar, 0);
8402 		gcc_assert (DECL_P (pvar));
8403 		tree new_var = lookup_decl (pvar, ctx);
8404 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8405 		gimple_seq_add_stmt (&new_body,
8406 				     gimple_build_assign (new_var, x));
8407 	      }
8408 	    else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
8409 	      {
8410 		tree new_var = lookup_decl (var, ctx);
8411 		new_var = DECL_VALUE_EXPR (new_var);
8412 		gcc_assert (TREE_CODE (new_var) == MEM_REF);
8413 		new_var = TREE_OPERAND (new_var, 0);
8414 		gcc_assert (DECL_P (new_var));
8415 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8416 		gimple_seq_add_stmt (&new_body,
8417 				     gimple_build_assign (new_var, x));
8418 	      }
8419 	    else
8420 	      {
8421 		tree type = TREE_TYPE (var);
8422 		tree new_var = lookup_decl (var, ctx);
8423 		if (omp_is_reference (var))
8424 		  {
8425 		    type = TREE_TYPE (type);
8426 		    if (TREE_CODE (type) != ARRAY_TYPE)
8427 		      {
8428 			tree v = create_tmp_var_raw (type, get_name (var));
8429 			gimple_add_tmp_var (v);
8430 			TREE_ADDRESSABLE (v) = 1;
8431 			x = fold_convert (type, x);
8432 			gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8433 				       fb_rvalue);
8434 			gimple_seq_add_stmt (&new_body,
8435 					     gimple_build_assign (v, x));
8436 			x = build_fold_addr_expr (v);
8437 		      }
8438 		  }
8439 		new_var = DECL_VALUE_EXPR (new_var);
8440 		x = fold_convert (TREE_TYPE (new_var), x);
8441 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8442 		gimple_seq_add_stmt (&new_body,
8443 				     gimple_build_assign (new_var, x));
8444 	      }
8445 	    break;
8446 	  }
8447       /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in a second pass,
8448 	 so that any firstprivate vars needed to hold OMP_CLAUSE_SIZE
8449 	 have already been handled.  Likewise OMP_CLAUSE_PRIVATE for VLAs
8450 	 or references to VLAs.  */
8451       for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
8452 	switch (OMP_CLAUSE_CODE (c))
8453 	  {
8454 	    tree var;
8455 	  default:
8456 	    break;
8457 	  case OMP_CLAUSE_MAP:
8458 	    if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8459 		|| OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8460 	      {
8461 		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8462 		HOST_WIDE_INT offset = 0;
8463 		gcc_assert (prev);
8464 		var = OMP_CLAUSE_DECL (c);
8465 		if (DECL_P (var)
8466 		    && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
8467 		    && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
8468 								      ctx))
8469 		    && varpool_node::get_create (var)->offloadable)
8470 		  break;
8471 		if (TREE_CODE (var) == INDIRECT_REF
8472 		    && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
8473 		  var = TREE_OPERAND (var, 0);
8474 		if (TREE_CODE (var) == COMPONENT_REF)
8475 		  {
8476 		    var = get_addr_base_and_unit_offset (var, &offset);
8477 		    gcc_assert (var != NULL_TREE && DECL_P (var));
8478 		  }
8479 		else if (DECL_SIZE (var)
8480 			 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
8481 		  {
8482 		    tree var2 = DECL_VALUE_EXPR (var);
8483 		    gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
8484 		    var2 = TREE_OPERAND (var2, 0);
8485 		    gcc_assert (DECL_P (var2));
8486 		    var = var2;
8487 		  }
8488 		tree new_var = lookup_decl (var, ctx), x;
8489 		tree type = TREE_TYPE (new_var);
8490 		bool is_ref;
8491 		if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
8492 		    && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8493 			== COMPONENT_REF))
8494 		  {
8495 		    type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
8496 		    is_ref = true;
8497 		    new_var = build2 (MEM_REF, type,
8498 				      build_fold_addr_expr (new_var),
8499 				      build_int_cst (build_pointer_type (type),
8500 						     offset));
8501 		  }
8502 		else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
8503 		  {
8504 		    type = TREE_TYPE (OMP_CLAUSE_DECL (c));
8505 		    is_ref = TREE_CODE (type) == REFERENCE_TYPE;
8506 		    new_var = build2 (MEM_REF, type,
8507 				      build_fold_addr_expr (new_var),
8508 				      build_int_cst (build_pointer_type (type),
8509 						     offset));
8510 		  }
8511 		else
8512 		  is_ref = omp_is_reference (var);
8513 		if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8514 		  is_ref = false;
8515 		bool ref_to_array = false;
8516 		if (is_ref)
8517 		  {
8518 		    type = TREE_TYPE (type);
8519 		    if (TREE_CODE (type) == ARRAY_TYPE)
8520 		      {
8521 			type = build_pointer_type (type);
8522 			ref_to_array = true;
8523 		      }
8524 		  }
8525 		else if (TREE_CODE (type) == ARRAY_TYPE)
8526 		  {
8527 		    tree decl2 = DECL_VALUE_EXPR (new_var);
8528 		    gcc_assert (TREE_CODE (decl2) == MEM_REF);
8529 		    decl2 = TREE_OPERAND (decl2, 0);
8530 		    gcc_assert (DECL_P (decl2));
8531 		    new_var = decl2;
8532 		    type = TREE_TYPE (new_var);
8533 		  }
8534 		x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
8535 		x = fold_convert_loc (clause_loc, type, x);
8536 		if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
8537 		  {
8538 		    tree bias = OMP_CLAUSE_SIZE (c);
8539 		    if (DECL_P (bias))
8540 		      bias = lookup_decl (bias, ctx);
8541 		    bias = fold_convert_loc (clause_loc, sizetype, bias);
8542 		    bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
8543 					    bias);
8544 		    x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
8545 					 TREE_TYPE (x), x, bias);
8546 		  }
8547 		if (ref_to_array)
8548 		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8549 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8550 		if (is_ref && !ref_to_array)
8551 		  {
8552 		    tree t = create_tmp_var_raw (type, get_name (var));
8553 		    gimple_add_tmp_var (t);
8554 		    TREE_ADDRESSABLE (t) = 1;
8555 		    gimple_seq_add_stmt (&new_body,
8556 					 gimple_build_assign (t, x));
8557 		    x = build_fold_addr_expr_loc (clause_loc, t);
8558 		  }
8559 		gimple_seq_add_stmt (&new_body,
8560 				     gimple_build_assign (new_var, x));
8561 		prev = NULL_TREE;
8562 	      }
8563 	    else if (OMP_CLAUSE_CHAIN (c)
8564 		     && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
8565 			== OMP_CLAUSE_MAP
8566 		     && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8567 			 == GOMP_MAP_FIRSTPRIVATE_POINTER
8568 			 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8569 			     == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
8570 	      prev = c;
8571 	    break;
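
	    /* A sketch of what the GOMP_MAP_FIRSTPRIVATE_POINTER
	       handling above achieves (assuming the clause pair created
	       during scanning): for

		 int *p;
		 #pragma omp target map(tofrom: p[10:n])

	       the runtime hands back the target address of the mapped
	       section, i.e. of p[10], and the private copy of P is then
	       initialized to that address minus the bias recorded in
	       OMP_CLAUSE_SIZE, so that p[10] again refers to the start
	       of the mapped storage.  */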
8572 	  case OMP_CLAUSE_PRIVATE:
8573 	    var = OMP_CLAUSE_DECL (c);
8574 	    if (is_variable_sized (var))
8575 	      {
8576 		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8577 		tree new_var = lookup_decl (var, ctx);
8578 		tree pvar = DECL_VALUE_EXPR (var);
8579 		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8580 		pvar = TREE_OPERAND (pvar, 0);
8581 		gcc_assert (DECL_P (pvar));
8582 		tree new_pvar = lookup_decl (pvar, ctx);
8583 		tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8584 		tree al = size_int (DECL_ALIGN (var));
8585 		tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
8586 		x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8587 		x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
8588 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8589 		gimple_seq_add_stmt (&new_body,
8590 				     gimple_build_assign (new_pvar, x));
8591 	      }
8592 	    else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
8593 	      {
8594 		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8595 		tree new_var = lookup_decl (var, ctx);
8596 		tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8597 		if (TREE_CONSTANT (x))
8598 		  break;
8599 		else
8600 		  {
8601 		    tree atmp
8602 		      = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8603 		    tree rtype = TREE_TYPE (TREE_TYPE (new_var));
8604 		    tree al = size_int (TYPE_ALIGN (rtype));
8605 		    x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8606 		  }
8607 
8608 		x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8609 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8610 		gimple_seq_add_stmt (&new_body,
8611 				     gimple_build_assign (new_var, x));
8612 	      }
8613 	    break;
8614 	  }
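
      /* A sketch of the OMP_CLAUSE_PRIVATE handling above for a VLA
	 (decl names are illustrative): given "int vla[n];" private on
	 the region, only the pointer behind the VLA's DECL_VALUE_EXPR
	 (*vla.ptr) needs initializing, so roughly

	   vla.ptr = __builtin_alloca_with_align (n * sizeof (int),
						  align_in_bits);

	 is emitted into the new body; references to non-constant sized
	 types are handled the same way just above.  */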
8615 
8616       gimple_seq fork_seq = NULL;
8617       gimple_seq join_seq = NULL;
8618 
8619       if (is_oacc_parallel (ctx))
8620 	{
8621 	  /* If there are reductions on the offloaded region itself, treat
8622 	     them as a dummy GANG loop.  */
8623 	  tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
8624 
8625 	  lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
8626 				 false, NULL, NULL, &fork_seq, &join_seq, ctx);
8627 	}
8628 
8629       gimple_seq_add_seq (&new_body, fork_seq);
8630       gimple_seq_add_seq (&new_body, tgt_body);
8631       gimple_seq_add_seq (&new_body, join_seq);
8632 
8633       if (offloaded)
8634 	new_body = maybe_catch_exception (new_body);
8635 
8636       gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
8637       gimple_omp_set_body (stmt, new_body);
8638     }
8639 
8640   bind = gimple_build_bind (NULL, NULL,
8641 			    tgt_bind ? gimple_bind_block (tgt_bind)
8642 				     : NULL_TREE);
8643   gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
8644   gimple_bind_add_seq (bind, ilist);
8645   gimple_bind_add_stmt (bind, stmt);
8646   gimple_bind_add_seq (bind, olist);
8647 
8648   pop_gimplify_context (NULL);
8649 
8650   if (dep_bind)
8651     {
8652       gimple_bind_add_seq (dep_bind, dep_ilist);
8653       gimple_bind_add_stmt (dep_bind, bind);
8654       gimple_bind_add_seq (dep_bind, dep_olist);
8655       pop_gimplify_context (dep_bind);
8656     }
8657 }
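
/* To make the mapping layout built by lower_omp_target concrete, a rough
   sketch (the array names are illustrative; the actual call into libgomp
   is emitted later, by pass_expand_omp):

     int x;
     #pragma omp target map(tofrom: x)
       x++;

   fills three parallel arrays for the runtime, roughly

     hostaddrs[] = { &x };
     sizes[]     = { sizeof (x) };
     kinds[]     = { GOMP_MAP_TOFROM | ceil_log2 (align) << talign_shift };

   i.e. each kinds element carries the map kind in its low bits and the
   log2 of the host object's alignment above TALIGN_SHIFT, exactly as
   encoded above.  */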
8658 
8659 /* Lower code for an OpenMP teams directive.  */
8660 
8661 static void
8662 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8663 {
8664   gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
8665   push_gimplify_context ();
8666 
8667   tree block = make_node (BLOCK);
8668   gbind *bind = gimple_build_bind (NULL, NULL, block);
8669   gsi_replace (gsi_p, bind, true);
8670   gimple_seq bind_body = NULL;
8671   gimple_seq dlist = NULL;
8672   gimple_seq olist = NULL;
8673 
8674   tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
8675 				    OMP_CLAUSE_NUM_TEAMS);
8676   if (num_teams == NULL_TREE)
8677     num_teams = build_int_cst (unsigned_type_node, 0);
8678   else
8679     {
8680       num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
8681       num_teams = fold_convert (unsigned_type_node, num_teams);
8682       gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
8683     }
8684   tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
8685 				       OMP_CLAUSE_THREAD_LIMIT);
8686   if (thread_limit == NULL_TREE)
8687     thread_limit = build_int_cst (unsigned_type_node, 0);
8688   else
8689     {
8690       thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
8691       thread_limit = fold_convert (unsigned_type_node, thread_limit);
8692       gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
8693 		     fb_rvalue);
8694     }
8695 
8696   lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
8697 			   &bind_body, &dlist, ctx, NULL);
8698   lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
8699   lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist, ctx);
8700   if (!gimple_omp_teams_grid_phony (teams_stmt))
8701     {
8702       gimple_seq_add_stmt (&bind_body, teams_stmt);
8703       location_t loc = gimple_location (teams_stmt);
8704       tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
8705       gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
8706       gimple_set_location (call, loc);
8707       gimple_seq_add_stmt (&bind_body, call);
8708     }
8709 
8710   gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
8711   gimple_omp_set_body (teams_stmt, NULL);
8712   gimple_seq_add_seq (&bind_body, olist);
8713   gimple_seq_add_seq (&bind_body, dlist);
8714   if (!gimple_omp_teams_grid_phony (teams_stmt))
8715     gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
8716   gimple_bind_set_body (bind, bind_body);
8717 
8718   pop_gimplify_context (bind);
8719 
8720   gimple_bind_append_vars (bind, ctx->block_vars);
8721   BLOCK_VARS (block) = ctx->block_vars;
8722   if (BLOCK_VARS (block))
8723     TREE_USED (block) = 1;
8724 }
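
/* As a sketch of lower_omp_teams's result (modulo gimplification
   detail): for

     #pragma omp teams num_teams (4) thread_limit (8)
     body;

   the new bind contains roughly

     __builtin_GOMP_teams (4, 8);
     body;

   with an omitted clause contributing a 0 argument, leaving the choice
   to the runtime.  */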
8725 
8726 /* Lower code within an artificial GIMPLE_OMP_GRID_BODY OMP construct.  */
8727 
8728 static void
8729 lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8730 {
8731   gimple *stmt = gsi_stmt (*gsi_p);
8732   lower_omp (gimple_omp_body_ptr (stmt), ctx);
8733   gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
8734 		       gimple_build_omp_return (false));
8735 }
8736 
8737 
8738 /* Callback for lower_omp_1.  Return non-NULL if *tp needs to be
8739    regimplified.  If DATA is non-NULL, lower_omp_1 is being invoked
8740    outside of an OMP context, but with task_shared_vars set.  */
8741 
8742 static tree
8743 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
8744     			void *data)
8745 {
8746   tree t = *tp;
8747 
8748   /* Any variable with DECL_VALUE_EXPR needs to be regimplified.  */
8749   if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
8750     return t;
8751 
8752   if (task_shared_vars
8753       && DECL_P (t)
8754       && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
8755     return t;
8756 
8757   /* If a global variable has been privatized, TREE_CONSTANT on
8758      ADDR_EXPR might be wrong.  */
8759   if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
8760     recompute_tree_invariant_for_addr_expr (t);
8761 
8762   *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
8763   return NULL_TREE;
8764 }
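
/* For instance (a hypothetical case): a privatized VLA A whose
   DECL_VALUE_EXPR is *A.ptr must be regimplified wherever it is still
   mentioned, so that the value-expr gets expanded; and an ADDR_EXPR
   such as &G of a privatized global G may no longer be TREE_CONSTANT,
   which is why it is recomputed above.  */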
8765 
8766 /* Data to be communicated between lower_omp_regimplify_operands and
8767    lower_omp_regimplify_operands_p.  */
8768 
8769 struct lower_omp_regimplify_operands_data
8770 {
8771   omp_context *ctx;
8772   vec<tree> *decls;
8773 };
8774 
8775 /* Helper function for lower_omp_regimplify_operands.  Find
8776    omp_member_access_dummy_var vars and temporarily adjust their
8777    DECL_VALUE_EXPRs if needed.  */
8778 
8779 static tree
8780 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
8781 				 void *data)
8782 {
8783   tree t = omp_member_access_dummy_var (*tp);
8784   if (t)
8785     {
8786       struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
8787       lower_omp_regimplify_operands_data *ldata
8788 	= (lower_omp_regimplify_operands_data *) wi->info;
8789       tree o = maybe_lookup_decl (t, ldata->ctx);
8790       if (o != t)
8791 	{
8792 	  ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
8793 	  ldata->decls->safe_push (*tp);
8794 	  tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
8795 	  SET_DECL_VALUE_EXPR (*tp, v);
8796 	}
8797     }
8798   *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
8799   return NULL_TREE;
8800 }
8801 
8802 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
8803    of omp_member_access_dummy_var vars during regimplification.  */
8804 
8805 static void
8806 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
8807 			       gimple_stmt_iterator *gsi_p)
8808 {
8809   auto_vec<tree, 10> decls;
8810   if (ctx)
8811     {
8812       struct walk_stmt_info wi;
8813       memset (&wi, '\0', sizeof (wi));
8814       struct lower_omp_regimplify_operands_data data;
8815       data.ctx = ctx;
8816       data.decls = &decls;
8817       wi.info = &data;
8818       walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
8819     }
8820   gimple_regimplify_operands (stmt, gsi_p);
8821   while (!decls.is_empty ())
8822     {
8823       tree t = decls.pop ();
8824       tree v = decls.pop ();
8825       SET_DECL_VALUE_EXPR (t, v);
8826     }
8827 }
8828 
8829 static void
8830 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8831 {
8832   gimple *stmt = gsi_stmt (*gsi_p);
8833   struct walk_stmt_info wi;
8834   gcall *call_stmt;
8835 
8836   if (gimple_has_location (stmt))
8837     input_location = gimple_location (stmt);
8838 
8839   if (task_shared_vars)
8840     memset (&wi, '\0', sizeof (wi));
8841 
8842   /* If we have issued syntax errors, avoid doing any heavy lifting.
8843      Just replace the OMP directives with a NOP to avoid
8844      confusing RTL expansion.  */
8845   if (seen_error () && is_gimple_omp (stmt))
8846     {
8847       gsi_replace (gsi_p, gimple_build_nop (), true);
8848       return;
8849     }
8850 
8851   switch (gimple_code (stmt))
8852     {
8853     case GIMPLE_COND:
8854       {
8855 	gcond *cond_stmt = as_a <gcond *> (stmt);
8856 	if ((ctx || task_shared_vars)
8857 	    && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
8858 			   lower_omp_regimplify_p,
8859 			   ctx ? NULL : &wi, NULL)
8860 		|| walk_tree (gimple_cond_rhs_ptr (cond_stmt),
8861 			      lower_omp_regimplify_p,
8862 			      ctx ? NULL : &wi, NULL)))
8863 	  lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
8864       }
8865       break;
8866     case GIMPLE_CATCH:
8867       lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
8868       break;
8869     case GIMPLE_EH_FILTER:
8870       lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
8871       break;
8872     case GIMPLE_TRY:
8873       lower_omp (gimple_try_eval_ptr (stmt), ctx);
8874       lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
8875       break;
8876     case GIMPLE_TRANSACTION:
8877       lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
8878 		 ctx);
8879       break;
8880     case GIMPLE_BIND:
8881       lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
8882       maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
8883       break;
8884     case GIMPLE_OMP_PARALLEL:
8885     case GIMPLE_OMP_TASK:
8886       ctx = maybe_lookup_ctx (stmt);
8887       gcc_assert (ctx);
8888       if (ctx->cancellable)
8889 	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8890       lower_omp_taskreg (gsi_p, ctx);
8891       break;
8892     case GIMPLE_OMP_FOR:
8893       ctx = maybe_lookup_ctx (stmt);
8894       gcc_assert (ctx);
8895       if (ctx->cancellable)
8896 	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8897       lower_omp_for (gsi_p, ctx);
8898       break;
8899     case GIMPLE_OMP_SECTIONS:
8900       ctx = maybe_lookup_ctx (stmt);
8901       gcc_assert (ctx);
8902       if (ctx->cancellable)
8903 	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8904       lower_omp_sections (gsi_p, ctx);
8905       break;
8906     case GIMPLE_OMP_SINGLE:
8907       ctx = maybe_lookup_ctx (stmt);
8908       gcc_assert (ctx);
8909       lower_omp_single (gsi_p, ctx);
8910       break;
8911     case GIMPLE_OMP_MASTER:
8912       ctx = maybe_lookup_ctx (stmt);
8913       gcc_assert (ctx);
8914       lower_omp_master (gsi_p, ctx);
8915       break;
8916     case GIMPLE_OMP_TASKGROUP:
8917       ctx = maybe_lookup_ctx (stmt);
8918       gcc_assert (ctx);
8919       lower_omp_taskgroup (gsi_p, ctx);
8920       break;
8921     case GIMPLE_OMP_ORDERED:
8922       ctx = maybe_lookup_ctx (stmt);
8923       gcc_assert (ctx);
8924       lower_omp_ordered (gsi_p, ctx);
8925       break;
8926     case GIMPLE_OMP_CRITICAL:
8927       ctx = maybe_lookup_ctx (stmt);
8928       gcc_assert (ctx);
8929       lower_omp_critical (gsi_p, ctx);
8930       break;
8931     case GIMPLE_OMP_ATOMIC_LOAD:
8932       if ((ctx || task_shared_vars)
8933 	  && walk_tree (gimple_omp_atomic_load_rhs_ptr (
8934 			  as_a <gomp_atomic_load *> (stmt)),
8935 			lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
8936 	lower_omp_regimplify_operands (ctx, stmt, gsi_p);
8937       break;
8938     case GIMPLE_OMP_TARGET:
8939       ctx = maybe_lookup_ctx (stmt);
8940       gcc_assert (ctx);
8941       lower_omp_target (gsi_p, ctx);
8942       break;
8943     case GIMPLE_OMP_TEAMS:
8944       ctx = maybe_lookup_ctx (stmt);
8945       gcc_assert (ctx);
8946       lower_omp_teams (gsi_p, ctx);
8947       break;
8948     case GIMPLE_OMP_GRID_BODY:
8949       ctx = maybe_lookup_ctx (stmt);
8950       gcc_assert (ctx);
8951       lower_omp_grid_body (gsi_p, ctx);
8952       break;
8953     case GIMPLE_CALL:
8954       tree fndecl;
8955       call_stmt = as_a <gcall *> (stmt);
8956       fndecl = gimple_call_fndecl (call_stmt);
8957       if (fndecl
8958 	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
8959 	switch (DECL_FUNCTION_CODE (fndecl))
8960 	  {
8961 	  case BUILT_IN_GOMP_BARRIER:
8962 	    if (ctx == NULL)
8963 	      break;
8964 	    /* FALLTHRU */
8965 	  case BUILT_IN_GOMP_CANCEL:
8966 	  case BUILT_IN_GOMP_CANCELLATION_POINT:
8967 	    omp_context *cctx;
8968 	    cctx = ctx;
8969 	    if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
8970 	      cctx = cctx->outer;
8971 	    gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
8972 	    if (!cctx->cancellable)
8973 	      {
8974 		if (DECL_FUNCTION_CODE (fndecl)
8975 		    == BUILT_IN_GOMP_CANCELLATION_POINT)
8976 		  {
8977 		    stmt = gimple_build_nop ();
8978 		    gsi_replace (gsi_p, stmt, false);
8979 		  }
8980 		break;
8981 	      }
8982 	    if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
8983 	      {
8984 		fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
8985 		gimple_call_set_fndecl (call_stmt, fndecl);
8986 		gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
8987 	      }
8988 	    tree lhs;
8989 	    lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
8990 	    gimple_call_set_lhs (call_stmt, lhs);
8991 	    tree fallthru_label;
8992 	    fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
8993 	    gimple *g;
8994 	    g = gimple_build_label (fallthru_label);
8995 	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
8996 	    g = gimple_build_cond (NE_EXPR, lhs,
8997 				   fold_convert (TREE_TYPE (lhs),
8998 						 boolean_false_node),
8999 				   cctx->cancel_label, fallthru_label);
9000 	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
9001 	    break;
9002 	  default:
9003 	    break;
9004 	  }
9005       /* FALLTHRU */
9006     default:
9007       if ((ctx || task_shared_vars)
9008 	  && walk_gimple_op (stmt, lower_omp_regimplify_p,
9009 			     ctx ? NULL : &wi))
9010 	{
9011 	  /* Just remove clobbers; this should happen only if we have
9012 	     "privatized" local addressable variables in SIMD regions.
9013 	     The clobber isn't needed in that case, and gimplifying the
9014 	     address of the ARRAY_REF into a pointer and creating a
9015 	     MEM_REF based clobber would create worse code than we get
9016 	     with the clobber dropped.  */
9017 	  if (gimple_clobber_p (stmt))
9018 	    {
9019 	      gsi_replace (gsi_p, gimple_build_nop (), true);
9020 	      break;
9021 	    }
9022 	  lower_omp_regimplify_operands (ctx, stmt, gsi_p);
9023 	}
9024       break;
9025     }
9026 }
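
/* A sketch of the cancellation rewrite done for GIMPLE_CALL above
   (temporary names are illustrative): inside a cancellable parallel,

     __builtin_GOMP_barrier ();

   becomes

     D.1 = __builtin_GOMP_barrier_cancel ();
     if (D.1 != 0) goto <cancel_label>; else goto <fallthru>;
     <fallthru>:

   so a barrier that observes a pending cancellation branches straight
   to the region's cancellation label.  */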
9027 
9028 static void
9029 lower_omp (gimple_seq *body, omp_context *ctx)
9030 {
9031   location_t saved_location = input_location;
9032   gimple_stmt_iterator gsi;
9033   for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
9034     lower_omp_1 (&gsi, ctx);
9035   /* During gimplification, we haven't folded statements inside offloading
9036      or taskreg regions (gimplify.c:maybe_fold_stmt); do that now.  */
9037   if (target_nesting_level || taskreg_nesting_level)
9038     for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
9039       fold_stmt (&gsi);
9040   input_location = saved_location;
9041 }
9042 
9043 /* Main entry point.  */
9044 
9045 static unsigned int
9046 execute_lower_omp (void)
9047 {
9048   gimple_seq body;
9049   int i;
9050   omp_context *ctx;
9051 
9052   /* This pass always runs, to provide PROP_gimple_lomp.
9053      But often, there is nothing to do.  */
9054   if (flag_cilkplus == 0 && flag_openacc == 0 && flag_openmp == 0
9055       && flag_openmp_simd == 0)
9056     return 0;
9057 
9058   all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
9059 				 delete_omp_context);
9060 
9061   body = gimple_body (current_function_decl);
9062 
9063   if (hsa_gen_requested_p ())
9064     omp_grid_gridify_all_targets (&body);
9065 
9066   scan_omp (&body, NULL);
9067   gcc_assert (taskreg_nesting_level == 0);
9068   FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
9069     finish_taskreg_scan (ctx);
9070   taskreg_contexts.release ();
9071 
9072   if (all_contexts->root)
9073     {
9074       if (task_shared_vars)
9075 	push_gimplify_context ();
9076       lower_omp (&body, NULL);
9077       if (task_shared_vars)
9078 	pop_gimplify_context (NULL);
9079     }
9080 
9081   if (all_contexts)
9082     {
9083       splay_tree_delete (all_contexts);
9084       all_contexts = NULL;
9085     }
9086   BITMAP_FREE (task_shared_vars);
9087 
9088   /* If the current function is a method, remove the artificial dummy
9089      VAR_DECLs created for non-static data member privatization; they
9090      aren't needed for debug info or anything else, have already been
9091      replaced everywhere in the IL, and cause problems with LTO.  */
9092   if (DECL_ARGUMENTS (current_function_decl)
9093       && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
9094       && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
9095 	  == POINTER_TYPE))
9096     remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
9097   return 0;
9098 }
9099 
9100 namespace {
9101 
9102 const pass_data pass_data_lower_omp =
9103 {
9104   GIMPLE_PASS, /* type */
9105   "omplower", /* name */
9106   OPTGROUP_OMP, /* optinfo_flags */
9107   TV_NONE, /* tv_id */
9108   PROP_gimple_any, /* properties_required */
9109   PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
9110   0, /* properties_destroyed */
9111   0, /* todo_flags_start */
9112   0, /* todo_flags_finish */
9113 };
9114 
9115 class pass_lower_omp : public gimple_opt_pass
9116 {
9117 public:
9118   pass_lower_omp (gcc::context *ctxt)
9119     : gimple_opt_pass (pass_data_lower_omp, ctxt)
9120   {}
9121 
9122   /* opt_pass methods: */
9123   virtual unsigned int execute (function *) { return execute_lower_omp (); }
9124 
9125 }; // class pass_lower_omp
9126 
9127 } // anon namespace
9128 
9129 gimple_opt_pass *
9130 make_pass_lower_omp (gcc::context *ctxt)
9131 {
9132   return new pass_lower_omp (ctxt);
9133 }
9134 
9135 /* The following is a utility to diagnose structured block violations.
9136    It is not part of the "omplower" pass, as that's invoked too late.  It
9137    should be invoked by the respective front ends after gimplification.  */
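
/* For example (a sketch), with -fopenmp:

     goto bad;
     #pragma omp parallel
     { bad:; }

   is diagnosed as "invalid entry to OpenMP structured block", since the
   branch context is NULL while the label's context is the parallel; a
   jump out of a region, or between two unrelated regions, gets the
   vaguer "invalid branch to/from OpenMP structured block".  */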
9138 
9139 static splay_tree all_labels;
9140 
9141 /* Check for mismatched contexts and generate an error if needed.  Return
9142    true if an error is detected.  */
9143 
9144 static bool
9145 diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
9146 	       gimple *branch_ctx, gimple *label_ctx)
9147 {
9148   gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
9149   gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
9150 
9151   if (label_ctx == branch_ctx)
9152     return false;
9153 
9154   const char* kind = NULL;
9155 
9156   if (flag_cilkplus)
9157     {
9158       if ((branch_ctx
9159 	   && gimple_code (branch_ctx) == GIMPLE_OMP_FOR
9160 	   && gimple_omp_for_kind (branch_ctx) == GF_OMP_FOR_KIND_CILKSIMD)
9161 	  || (label_ctx
9162 	      && gimple_code (label_ctx) == GIMPLE_OMP_FOR
9163 	      && gimple_omp_for_kind (label_ctx) == GF_OMP_FOR_KIND_CILKSIMD))
9164 	kind = "Cilk Plus";
9165     }
9166   if (flag_openacc)
9167     {
9168       if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
9169 	  || (label_ctx && is_gimple_omp_oacc (label_ctx)))
9170 	{
9171 	  gcc_checking_assert (kind == NULL);
9172 	  kind = "OpenACC";
9173 	}
9174     }
9175   if (kind == NULL)
9176     {
9177       gcc_checking_assert (flag_openmp || flag_openmp_simd);
9178       kind = "OpenMP";
9179     }
9180 
9181   /* Previously we kept track of the label's entire context in diagnose_sb_[12]
9182      so we could traverse it and issue a correct "exit" or "enter" error
9183      message upon a structured block violation.
9184 
9185      We built the context by chaining a list together with tree_cons, but
9186      there is no easy counterpart in gimple tuples.  It seems like far too
9187      much work for issuing exit/enter error messages.  If someone really
9188      misses the distinct error message... patches welcome.  */
9189 
9190 #if 0
9191   /* Try to avoid confusing the user by producing an error message
9192      with correct "exit" or "enter" verbiage.  We prefer "exit"
9193      unless we can show that LABEL_CTX is nested within BRANCH_CTX.  */
9194   if (branch_ctx == NULL)
9195     exit_p = false;
9196   else
9197     {
9198       while (label_ctx)
9199 	{
9200 	  if (TREE_VALUE (label_ctx) == branch_ctx)
9201 	    {
9202 	      exit_p = false;
9203 	      break;
9204 	    }
9205 	  label_ctx = TREE_CHAIN (label_ctx);
9206 	}
9207     }
9208 
9209   if (exit_p)
9210     error ("invalid exit from %s structured block", kind);
9211   else
9212     error ("invalid entry to %s structured block", kind);
9213 #endif
9214 
9215   /* If it's obvious we have an invalid entry, be specific about the error.  */
9216   if (branch_ctx == NULL)
9217     error ("invalid entry to %s structured block", kind);
9218   else
9219     {
9220       /* Otherwise, be vague and lazy, but efficient.  */
9221       error ("invalid branch to/from %s structured block", kind);
9222     }
9223 
9224   gsi_replace (gsi_p, gimple_build_nop (), false);
9225   return true;
9226 }
9227 
9228 /* Pass 1: Create a minimal tree of structured blocks, and record
9229    where each label is found.  */
9230 
9231 static tree
9232 diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9233     	       struct walk_stmt_info *wi)
9234 {
9235   gimple *context = (gimple *) wi->info;
9236   gimple *inner_context;
9237   gimple *stmt = gsi_stmt (*gsi_p);
9238 
9239   *handled_ops_p = true;
9240 
9241   switch (gimple_code (stmt))
9242     {
9243     WALK_SUBSTMTS;
9244 
9245     case GIMPLE_OMP_PARALLEL:
9246     case GIMPLE_OMP_TASK:
9247     case GIMPLE_OMP_SECTIONS:
9248     case GIMPLE_OMP_SINGLE:
9249     case GIMPLE_OMP_SECTION:
9250     case GIMPLE_OMP_MASTER:
9251     case GIMPLE_OMP_ORDERED:
9252     case GIMPLE_OMP_CRITICAL:
9253     case GIMPLE_OMP_TARGET:
9254     case GIMPLE_OMP_TEAMS:
9255     case GIMPLE_OMP_TASKGROUP:
9256       /* The minimal context here is just the current OMP construct.  */
9257       inner_context = stmt;
9258       wi->info = inner_context;
9259       walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
9260       wi->info = context;
9261       break;
9262 
9263     case GIMPLE_OMP_FOR:
9264       inner_context = stmt;
9265       wi->info = inner_context;
9266       /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9267 	 walk them.  */
9268       walk_gimple_seq (gimple_omp_for_pre_body (stmt),
9269 	  	       diagnose_sb_1, NULL, wi);
9270       walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
9271       wi->info = context;
9272       break;
9273 
9274     case GIMPLE_LABEL:
9275       splay_tree_insert (all_labels,
9276 			 (splay_tree_key) gimple_label_label (
9277 					    as_a <glabel *> (stmt)),
9278 			 (splay_tree_value) context);
9279       break;
9280 
9281     default:
9282       break;
9283     }
9284 
9285   return NULL_TREE;
9286 }
9287 
9288 /* Pass 2: Check each branch and see if its context differs from that of
9289    the destination label's context.  */
9290 
9291 static tree
9292 diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9293     	       struct walk_stmt_info *wi)
9294 {
9295   gimple *context = (gimple *) wi->info;
9296   splay_tree_node n;
9297   gimple *stmt = gsi_stmt (*gsi_p);
9298 
9299   *handled_ops_p = true;
9300 
9301   switch (gimple_code (stmt))
9302     {
9303     WALK_SUBSTMTS;
9304 
9305     case GIMPLE_OMP_PARALLEL:
9306     case GIMPLE_OMP_TASK:
9307     case GIMPLE_OMP_SECTIONS:
9308     case GIMPLE_OMP_SINGLE:
9309     case GIMPLE_OMP_SECTION:
9310     case GIMPLE_OMP_MASTER:
9311     case GIMPLE_OMP_ORDERED:
9312     case GIMPLE_OMP_CRITICAL:
9313     case GIMPLE_OMP_TARGET:
9314     case GIMPLE_OMP_TEAMS:
9315     case GIMPLE_OMP_TASKGROUP:
9316       wi->info = stmt;
9317       walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
9318       wi->info = context;
9319       break;
9320 
9321     case GIMPLE_OMP_FOR:
9322       wi->info = stmt;
9323       /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9324 	 walk them.  */
9325       walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
9326 			   diagnose_sb_2, NULL, wi);
9327       walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
9328       wi->info = context;
9329       break;
9330 
9331     case GIMPLE_COND:
9332 	{
9333 	  gcond *cond_stmt = as_a <gcond *> (stmt);
9334 	  tree lab = gimple_cond_true_label (cond_stmt);
9335 	  if (lab)
9336 	    {
9337 	      n = splay_tree_lookup (all_labels,
9338 				     (splay_tree_key) lab);
9339 	      diagnose_sb_0 (gsi_p, context,
9340 			     n ? (gimple *) n->value : NULL);
9341 	    }
9342 	  lab = gimple_cond_false_label (cond_stmt);
9343 	  if (lab)
9344 	    {
9345 	      n = splay_tree_lookup (all_labels,
9346 				     (splay_tree_key) lab);
9347 	      diagnose_sb_0 (gsi_p, context,
9348 			     n ? (gimple *) n->value : NULL);
9349 	    }
9350 	}
9351       break;
9352 
9353     case GIMPLE_GOTO:
9354       {
9355 	tree lab = gimple_goto_dest (stmt);
9356 	if (TREE_CODE (lab) != LABEL_DECL)
9357 	  break;
9358 
9359 	n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
9360 	diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
9361       }
9362       break;
9363 
9364     case GIMPLE_SWITCH:
9365       {
9366 	gswitch *switch_stmt = as_a <gswitch *> (stmt);
9367 	unsigned int i;
9368 	for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
9369 	  {
9370 	    tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
9371 	    n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
9372 	    if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
9373 	      break;
9374 	  }
9375       }
9376       break;
9377 
9378     case GIMPLE_RETURN:
9379       diagnose_sb_0 (gsi_p, context, NULL);
9380       break;
9381 
9382     default:
9383       break;
9384     }
9385 
9386   return NULL_TREE;
9387 }
9388 
9389 static unsigned int
9390 diagnose_omp_structured_block_errors (void)
9391 {
9392   struct walk_stmt_info wi;
9393   gimple_seq body = gimple_body (current_function_decl);
9394 
9395   all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
9396 
9397   memset (&wi, 0, sizeof (wi));
9398   walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
9399 
9400   memset (&wi, 0, sizeof (wi));
9401   wi.want_locations = true;
9402   walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
9403 
9404   gimple_set_body (current_function_decl, body);
9405 
9406   splay_tree_delete (all_labels);
9407   all_labels = NULL;
9408 
9409   return 0;
9410 }
9411 
9412 namespace {
9413 
9414 const pass_data pass_data_diagnose_omp_blocks =
9415 {
9416   GIMPLE_PASS, /* type */
9417   "*diagnose_omp_blocks", /* name */
9418   OPTGROUP_OMP, /* optinfo_flags */
9419   TV_NONE, /* tv_id */
9420   PROP_gimple_any, /* properties_required */
9421   0, /* properties_provided */
9422   0, /* properties_destroyed */
9423   0, /* todo_flags_start */
9424   0, /* todo_flags_finish */
9425 };
9426 
9427 class pass_diagnose_omp_blocks : public gimple_opt_pass
9428 {
9429 public:
9430   pass_diagnose_omp_blocks (gcc::context *ctxt)
9431     : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
9432   {}
9433 
9434   /* opt_pass methods: */
9435   virtual bool gate (function *)
9436   {
9437     return flag_cilkplus || flag_openacc || flag_openmp || flag_openmp_simd;
9438   }
9439   virtual unsigned int execute (function *)
9440     {
9441       return diagnose_omp_structured_block_errors ();
9442     }
9443 
9444 }; // class pass_diagnose_omp_blocks
9445 
9446 } // anon namespace
9447 
9448 gimple_opt_pass *
9449 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
9450 {
9451   return new pass_diagnose_omp_blocks (ctxt);
9452 }
9453 
9454 
9455 #include "gt-omp-low.h"
9456