/* Lowering pass for OMP directives.  Converts OMP directives into explicit
   calls to the runtime library (libgomp), data marshalling to implement data
   sharing and copying clauses, offloading to accelerators, and more.

   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "splay-tree.h"
#include "omp-general.h"
#include "omp-low.h"
#include "omp-grid.h"
#include "gimple-low.h"
#include "symbol-summary.h"
#include "tree-nested.h"
#include "context.h"
#include "gomp-constants.h"
#include "gimple-pretty-print.h"
#include "hsa-common.h"

/* Lowering of OMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for regions which are then moved to a new
   function, to be invoked by the thread library, or offloaded.  */

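/* As an illustrative sketch (written in GIMPLE-dump style, not literal
   output of this pass), a construct such as

       #pragma omp parallel shared(x)
	 body;

   is eventually outlined into a child function plus a runtime call
   roughly of the form

       struct .omp_data_s { int *x; } .omp_data_o;
       .omp_data_o.x = &x;
       GOMP_parallel (foo._omp_fn.0, &.omp_data_o, 0, 0);

   This file builds the .omp_data_s record and rewrites variable uses to
   go through it; pass_expand_omp performs the actual outlining.  */
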
/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used only by task contexts, when the task firstprivate fn
     is needed.  srecord_type is used to communicate from the thread that
     encountered the task construct to the task firstprivate fn;
     record_type is allocated by GOMP_task, initialized by the task
     firstprivate fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,lation_point} and explicit and implicit
     barriers should jump during the omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* What to do with variables with implicitly determined sharing
     attributes.  */
  enum omp_clause_default_kind default_kind;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;
};

static splay_tree all_contexts;
static int taskreg_nesting_level;
static int target_nesting_level;
static bitmap task_shared_vars;
static vec<omp_context *> taskreg_contexts;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);

#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;

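/* As a minimal sketch, WALK_SUBSTMTS is meant to be pasted into the
   dispatch switch of a walk_gimple_seq callback, so that container
   statements are recursed into rather than treated as handled:

       switch (gimple_code (stmt))
	 {
	 WALK_SUBSTMTS;
	 default:
	   break;
	 }

   See omp_find_combined_for below for a real use.  */
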
/* Return true if CTX corresponds to an oacc parallel region.  */

static bool
is_oacc_parallel (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_PARALLEL));
}

/* Return true if CTX corresponds to an oacc kernels region.  */

static bool
is_oacc_kernels (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_KERNELS));
}

/* If DECL is the artificial dummy VAR_DECL created for non-static
   data member privatization, return the underlying "this" parameter,
   otherwise return NULL.  */

tree
omp_member_access_dummy_var (tree decl)
{
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
      case MEM_REF:
      case INDIRECT_REF:
      CASE_CONVERT:
      case POINTER_PLUS_EXPR:
	v = TREE_OPERAND (v, 0);
	continue;
      case PARM_DECL:
	if (DECL_CONTEXT (v) == current_function_decl
	    && DECL_ARTIFICIAL (v)
	    && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
	  return v;
	return NULL_TREE;
      default:
	return NULL_TREE;
      }
}

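/* For example, when a C++ member function contains

       #pragma omp parallel private(n)

   where N is a non-static data member, the front end represents the
   privatized N as an artificial VAR_DECL whose DECL_VALUE_EXPR is a
   COMPONENT_REF rooted at the artificial "this" PARM_DECL; the function
   above recovers that parameter.  (Illustrative sketch; the exact trees
   are front-end dependent.)  */
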
/* Helper for unshare_and_remap, called through walk_tree.  */

static tree
unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
{
  tree *pair = (tree *) data;
  if (*tp == pair[0])
    {
      *tp = unshare_expr (pair[1]);
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Return unshare_expr (X) with all occurrences of FROM
   replaced with TO.  */

static tree
unshare_and_remap (tree x, tree from, tree to)
{
  tree pair[2] = { from, to };
  x = unshare_expr (x);
  walk_tree (&x, unshare_and_remap_1, pair, NULL);
  return x;
}

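/* For instance, build_outer_var_ref below uses this to rewrite a
   DECL_VALUE_EXPR such as this->n, substituting for the dummy "this"
   its counterpart found in an outer context.  */
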
/* Convenience function for calling scan_omp_1_op on tree operands.  */

static inline tree
scan_omp_op (tree *tp, omp_context *ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  return walk_tree (tp, scan_omp_1_op, &wi, NULL);
}

static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);

/* Return true if CTX is for an omp parallel.  */

static inline bool
is_parallel_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
}


/* Return true if CTX is for an omp task.  */

static inline bool
is_task_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}


/* Return true if CTX is for an omp taskloop.  */

static inline bool
is_taskloop_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
}


/* Return true if CTX is for an omp parallel or omp task.  */

static inline bool
is_taskreg_ctx (omp_context *ctx)
{
  return is_parallel_ctx (ctx) || is_task_ctx (ctx);
}

/* Return true if EXPR is variable sized.  */

static inline bool
is_variable_sized (const_tree expr)
{
  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
}

/* Lookup variables.  The "maybe" form allows the variable not to have
   been entered; the plain form asserts that the variable must have
   been entered.  */

static inline tree
lookup_decl (tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (var);
  return *n;
}

static inline tree
maybe_lookup_decl (const_tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
  return n ? *n : NULL_TREE;
}

static inline tree
lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
lookup_sfield (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->sfield_map
			 ? ctx->sfield_map : ctx->field_map, key);
  return (tree) n->value;
}

static inline tree
lookup_sfield (tree var, omp_context *ctx)
{
  return lookup_sfield ((splay_tree_key) var, ctx);
}

static inline tree
maybe_lookup_field (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, key);
  return n ? (tree) n->value : NULL_TREE;
}

static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
  return maybe_lookup_field ((splay_tree_key) var, ctx);
}

/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (TREE_ADDRESSABLE (decl))
	return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
	 for these.  */
      if (TREE_READONLY (decl)
	  || ((TREE_CODE (decl) == RESULT_DECL
	       || TREE_CODE (decl) == PARM_DECL)
	      && DECL_BY_REFERENCE (decl)))
	return false;

      /* Disallow copy-in/out in a nested parallel if DECL is shared in
	 an outer parallel; otherwise each thread could store the shared
	 variable in its own copy-in location, making the variable no
	 longer really shared.  */
      if (shared_ctx->is_nested)
	{
	  omp_context *up;

	  for (up = shared_ctx->outer; up; up = up->outer)
	    if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      for (c = gimple_omp_taskreg_clauses (up->stmt);
		   c; c = OMP_CLAUSE_CHAIN (c))
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		    && OMP_CLAUSE_DECL (c) == decl)
		  break;

	      if (c)
		goto maybe_mark_addressable_and_ret;
	    }
	}

      /* For tasks avoid using copy-in/out.  As tasks can be deferred
	 or executed in a different thread, the task need not have
	 terminated by the time GOMP_task returns.  */
      if (is_task_ctx (shared_ctx))
	{
	  tree outer;
	maybe_mark_addressable_and_ret:
	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!task_shared_vars)
		task_shared_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (task_shared_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}

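/* For example, given

       int scalar;
       int array[10];
       #pragma omp parallel shared(scalar, array)

   the aggregate ARRAY is always passed by pointer, while a
   non-addressable SCALAR may use copy-in/copy-out, subject to the
   checks above (nesting, value-exprs, tasks, ...).  Illustrative
   sketch only, not an exhaustive description of the predicate.  */
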
/* Construct a new automatic decl similar to VAR.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;
  DECL_CHAIN (copy) = ctx->block_vars;
  /* If VAR is listed in task_shared_vars, it wasn't originally
     addressable and was only marked addressable because some task
     needs to take its address.  The privatized copies of it need not
     be addressable.  */
  if (TREE_ADDRESSABLE (var)
      && task_shared_vars
      && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
    TREE_ADDRESSABLE (copy) = 0;
  ctx->block_vars = copy;

  return copy;
}

static tree
omp_copy_decl_1 (tree var, omp_context *ctx)
{
  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
}

/* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
   as appropriate.  */
static tree
omp_build_component_ref (tree obj, tree field)
{
  tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
  if (TREE_THIS_VOLATILE (field))
    TREE_THIS_VOLATILE (ret) |= 1;
  if (TREE_READONLY (field))
    TREE_READONLY (ret) |= 1;
  return ret;
}

/* Build tree nodes to access the field for VAR on the receiver side.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_simple_mem_ref (ctx->receiver_decl);
  TREE_THIS_NOTRAP (x) = 1;
  x = omp_build_component_ref (x, field);
  if (by_ref)
    {
      x = build_simple_mem_ref (x);
      TREE_THIS_NOTRAP (x) = 1;
    }

  return x;
}

/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
		     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	    && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
	   || (code == OMP_CLAUSE_PRIVATE
	       && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
	 even private vars in its linear etc. clauses.
	 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
	 to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (ctx->outer && is_taskreg_ctx (ctx))
	x = lookup_decl (var, ctx->outer);
      else if (ctx->outer)
	x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
	x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (ctx->outer);
      splay_tree_node n
	= splay_tree_lookup (ctx->outer->field_map,
			     (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
	{
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx->outer)))
	    x = var;
	  else
	    x = lookup_decl (var, ctx->outer);
	}
      else
	{
	  tree field = (tree) n->value;
	  /* If the receiver record type was remapped in the child function,
	     remap the field into the new record type.  */
	  x = maybe_lookup_field (field, ctx->outer);
	  if (x != NULL)
	    field = x;

	  x = build_simple_mem_ref (ctx->outer->receiver_decl);
	  x = omp_build_component_ref (x, field);
	  if (use_pointer_for_field (var, ctx->outer))
	    x = build_simple_mem_ref (x);
	}
    }
  else if (ctx->outer)
    {
      omp_context *outer = ctx->outer;
      if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
	{
	  outer = outer->outer;
	  gcc_assert (outer
		      && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
	}
      x = lookup_decl (var, outer);
    }
  else if (omp_is_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      tree t = omp_member_access_dummy_var (var);
      if (t)
	{
	  x = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    x = unshare_and_remap (x, t, o);
	  else
	    x = unshare_expr (x);
	}
    }

  if (omp_is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}

/* Build tree nodes to access the field for VAR on the sender side.  */

static tree
build_sender_ref (splay_tree_key key, omp_context *ctx)
{
  tree field = lookup_sfield (key, ctx);
  return omp_build_component_ref (ctx->sender_decl, field);
}

static tree
build_sender_ref (tree var, omp_context *ctx)
{
  return build_sender_ref ((splay_tree_key) var, ctx);
}

/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  If
   BASE_POINTERS_RESTRICT, declare the field with restrict.  */

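/* MASK below is a bit mask; as inferred from the callers in
   scan_sharing_clauses: bit 0 installs the field into FIELD_MAP /
   RECORD_TYPE, bit 1 into SFIELD_MAP / SRECORD_TYPE, bit 2 makes the
   field a pointer-to-pointer (for array types), and bit 3 keys the
   splay tree by &DECL_UID (VAR) rather than VAR itself, matching the
   taskloop lastprivate lookup in build_outer_var_ref.  */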
static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx,
		   bool base_pointers_restrict = false)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
	      || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    {
      type = build_pointer_type (type);
      if (base_pointers_restrict)
	type = build_qualified_type (type, TYPE_QUAL_RESTRICT);
    }
  else if ((mask & 3) == 1 && omp_is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if (type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (t),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}

static tree
install_var_local (tree var, omp_context *ctx)
{
  tree new_var = omp_copy_decl_1 (var, ctx);
  insert_decl_map (&ctx->cb, var, new_var);
  return new_var;
}

/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}

/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
	return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}

/* Create a new context, with OUTER_CTX being the surrounding context.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
		     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}

static gimple_seq maybe_catch_exception (gimple_seq);

/* Finalize task copyfn.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}

/* Destroy an omp_context data structure.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  delete ctx->cb.decl_map;

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  if (is_task_ctx (ctx))
    finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));

  XDELETE (ctx);
}

/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
			 TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
	{
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
	  DECL_CHAIN (new_f) = new_fields;
	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}

/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  If BASE_POINTERS_RESTRICT, install var field with
   restrict.  */

static void
scan_sharing_clauses (tree clauses, omp_context *ctx,
		      bool base_pointers_restrict = false)
{
  tree c, decl;
  bool scan_array_reductions = false;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    goto do_private;
	  else if (!is_variable_sized (decl))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_SHARED:
	  decl = OMP_CLAUSE_DECL (c);
	  /* Ignore shared directives in teams construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
	      if (is_global_var (odecl))
		break;
	      insert_decl_map (&ctx->cb, decl, odecl);
	      break;
	    }
	  gcc_assert (is_taskreg_ctx (ctx));
	  gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
		      || !is_variable_sized (decl));
	  /* Global variables don't need to be copied,
	     the receiver side will use them directly.  */
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      use_pointer_for_field (decl, ctx);
	      break;
	    }
	  by_ref = use_pointer_for_field (decl, NULL);
	  if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
	      || TREE_ADDRESSABLE (decl)
	      || by_ref
	      || omp_is_reference (decl))
	    {
	      by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 3, ctx);
	      install_var_local (decl, ctx);
	      break;
	    }
	  /* We don't need to copy const scalar vars back.  */
	  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
	  goto do_private;

	case OMP_CLAUSE_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && TREE_CODE (decl) == MEM_REF)
	    {
	      tree t = TREE_OPERAND (decl, 0);
	      if (TREE_CODE (t) == POINTER_PLUS_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (TREE_CODE (t) == INDIRECT_REF
		  || TREE_CODE (t) == ADDR_EXPR)
		t = TREE_OPERAND (t, 0);
	      install_var_local (t, ctx);
	      if (is_taskreg_ctx (ctx)
		  && !is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
		  && !is_variable_sized (t))
		{
		  by_ref = use_pointer_for_field (t, ctx);
		  install_var_field (t, by_ref, 3, ctx);
		}
	      break;
	    }
	  goto do_private;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	do_private:
	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
		install_var_field (decl, !omp_is_reference (decl), 3, ctx);
	      else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		install_var_field (decl, true, 3, ctx);
	      else
		install_var_field (decl, false, 3, ctx);
	    }
	  if (is_variable_sized (decl))
	    {
	      if (is_task_ctx (ctx))
		install_var_field (decl, false, 1, ctx);
	      break;
	    }
	  else if (is_taskreg_ctx (ctx))
	    {
	      bool global
		= is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
	      by_ref = use_pointer_for_field (decl, NULL);

	      if (is_task_ctx (ctx)
		  && (global || by_ref || omp_is_reference (decl)))
		{
		  install_var_field (decl, false, 1, ctx);
		  if (!global)
		    install_var_field (decl, by_ref, 2, ctx);
		}
	      else if (!global)
		install_var_field (decl, by_ref, 3, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_USE_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_field (decl, true, 3, ctx);
	  else
	    install_var_field (decl, false, 3, ctx);
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      install_var_local (decl2, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  goto do_private;

	case OMP_CLAUSE__LOOPTEMP_:
	  gcc_assert (is_taskreg_ctx (ctx));
	  decl = OMP_CLAUSE_DECL (c);
	  install_var_field (decl, false, 3, ctx);
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	  decl = OMP_CLAUSE_DECL (c);
	  by_ref = use_pointer_for_field (decl, NULL);
	  install_var_field (decl, by_ref, 3, ctx);
	  break;

	case OMP_CLAUSE_DEFAULT:
	  ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
	  break;

	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE__CILK_FOR_COUNT_:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_MAP:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
	  decl = OMP_CLAUSE_DECL (c);
	  /* Global variables with the "omp declare target" attribute
	     don't need to be copied; the receiver side will use them
	     directly.  However, global variables with the "omp declare
	     target link" attribute do need to be copied, as does any
	     map with the ALWAYS modifier.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable
	      && !lookup_attribute ("omp declare target link",
				    DECL_ATTRIBUTES (decl)))
	    break;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
	    {
	      /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
		 not offloaded; there is nothing to map for those.  */
	      if (!is_gimple_omp_offloaded (ctx->stmt)
		  && !POINTER_TYPE_P (TREE_TYPE (decl))
		  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
		break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		  || (OMP_CLAUSE_MAP_KIND (c)
		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	    {
	      if (TREE_CODE (decl) == COMPONENT_REF
		  || (TREE_CODE (decl) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE)))
		break;
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (DECL_P (decl))
	    {
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_field (decl2, true, 3, ctx);
		  install_var_local (decl2, ctx);
		  install_var_local (decl, ctx);
		}
	      else
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		    install_var_field (decl, true, 7, ctx);
		  else
		    install_var_field (decl, true, 3, ctx,
				       base_pointers_restrict);
		  if (is_gimple_omp_offloaded (ctx->stmt)
		      && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
		    install_var_local (decl, ctx);
		}
	    }
	  else
	    {
	      tree base = get_base_address (decl);
	      tree nc = OMP_CLAUSE_CHAIN (c);
	      if (DECL_P (base)
		  && nc != NULL_TREE
		  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
		  && OMP_CLAUSE_DECL (nc) == base
		  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
		  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
		{
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
		}
	      else
		{
		  if (ctx->outer)
		    {
		      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
		      decl = OMP_CLAUSE_DECL (c);
		    }
		  gcc_assert (!splay_tree_lookup (ctx->field_map,
						  (splay_tree_key) decl));
		  tree field
		    = build_decl (OMP_CLAUSE_LOCATION (c),
				  FIELD_DECL, NULL_TREE, ptr_type_node);
		  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
		  insert_field_into_struct (ctx->record_type, field);
		  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
				     (splay_tree_value) field);
		}
	    }
	  break;

	case OMP_CLAUSE__GRIDDIM_:
	  if (ctx->outer)
	    {
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
	    }
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (decl)
	      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_LINEAR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_variable_sized (decl))
	    {
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
		  && is_gimple_omp_offloaded (ctx->stmt))
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		  fixup_remapped_decl (decl2, ctx, false);
		}
	      install_var_local (decl, ctx);
	    }
	  fixup_remapped_decl (decl, ctx,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			       && OMP_CLAUSE_PRIVATE_DEBUG (c));
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) != MEM_REF)
	    {
	      if (is_variable_sized (decl))
		install_var_local (decl, ctx);
	      fixup_remapped_decl (decl, ctx, false);
	    }
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_SHARED:
	  /* Ignore shared directives in teams construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
								 ctx->outer)))
		break;
	      bool by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 11, ctx);
	      break;
	    }
	  fixup_remapped_decl (decl, ctx, false);
	  break;

	case OMP_CLAUSE_MAP:
	  if (!is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable)
	    break;
	  if (DECL_P (decl))
	    {
	      if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
		  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
		{
		  tree new_decl = lookup_decl (decl, ctx);
		  TREE_TYPE (new_decl)
		    = remap_type (TREE_TYPE (decl), &ctx->cb);
		}
	      else if (DECL_SIZE (decl)
		       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  fixup_remapped_decl (decl2, ctx, false);
		  fixup_remapped_decl (decl, ctx, true);
		}
	      else
		fixup_remapped_decl (decl, ctx, false);
	    }
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_ALIGNED:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE__CILK_FOR_COUNT_:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__GRIDDIM_:
	case OMP_CLAUSE__SIMT_:
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

  gcc_checking_assert (!scan_array_reductions
		       || !is_gimple_omp_oacc (ctx->stmt));
  if (scan_array_reductions)
    {
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	    && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	  {
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
    }
}

/* Create a new name for the omp child function.  Returns an identifier.
   If IS_CILK_FOR is true, the suffix for the child function is
   "_cilk_for_fn".  */

static tree
create_omp_child_function_name (bool task_copy, bool is_cilk_for)
{
  if (is_cilk_for)
    return clone_function_name (current_function_decl, "_cilk_for_fn");
  return clone_function_name (current_function_decl,
			      task_copy ? "_omp_cpyfn" : "_omp_fn");
}

/* Return the type of the induction variable for the _Cilk_for child
   function, and of its __high and __low variables, based on TYPE.  */

static tree
cilk_for_check_loop_diff_type (tree type)
{
  if (TYPE_PRECISION (type) <= TYPE_PRECISION (uint32_type_node))
    {
      if (TYPE_UNSIGNED (type))
	return uint32_type_node;
      else
	return integer_type_node;
    }
  else
    {
      if (TYPE_UNSIGNED (type))
	return uint64_type_node;
      else
	return long_long_integer_type_node;
    }
}

/* Return true if CTX may belong to offloaded code: either if current function
   is offloaded, or any enclosing context corresponds to a target region.  */

static bool
omp_maybe_offloaded_ctx (omp_context *ctx)
{
  if (cgraph_node::get (current_function_decl)->offloadable)
    return true;
  for (; ctx; ctx = ctx->outer)
    if (is_gimple_omp_offloaded (ctx->stmt))
      return true;
  return false;
}

/* Build a decl for the omp child function.  It'll not contain a body
   yet, just the bare decl.  */

static void
create_omp_child_function (omp_context *ctx, bool task_copy)
{
  tree decl, type, name, t;

  tree cilk_for_count
    = (flag_cilkplus && gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
      ? omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
			 OMP_CLAUSE__CILK_FOR_COUNT_) : NULL_TREE;
  tree cilk_var_type = NULL_TREE;

  name = create_omp_child_function_name (task_copy,
					 cilk_for_count != NULL_TREE);
  if (task_copy)
    type = build_function_type_list (void_type_node, ptr_type_node,
				     ptr_type_node, NULL_TREE);
  else if (cilk_for_count)
    {
      type = TREE_TYPE (OMP_CLAUSE_OPERAND (cilk_for_count, 0));
      cilk_var_type = cilk_for_check_loop_diff_type (type);
      type = build_function_type_list (void_type_node, ptr_type_node,
				       cilk_var_type, cilk_var_type, NULL_TREE);
    }
  else
    type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);

  decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);

  gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
		       || !task_copy);
  if (!task_copy)
    ctx->cb.dst_fn = decl;
  else
    gimple_omp_task_set_copy_fn (ctx->stmt, decl);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  if (omp_maybe_offloaded_ctx (ctx))
    {
      cgraph_node::get_create (decl)->offloadable = 1;
      if (ENABLE_OFFLOADING)
	g->have_offload = true;
    }

  if (cgraph_node::get_create (decl)->offloadable
      && !lookup_attribute ("omp declare target",
			    DECL_ATTRIBUTES (current_function_decl)))
    {
      const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
				 ? "omp target entrypoint"
				 : "omp declare target");
      DECL_ATTRIBUTES (decl)
	= tree_cons (get_identifier (target_attr),
		     NULL_TREE, DECL_ATTRIBUTES (decl));
    }

  t = build_decl (DECL_SOURCE_LOCATION (decl),
		  RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;

  /* _Cilk_for's child function requires two extra parameters called
     __low and __high that are set by the Cilk runtime when it calls
     this function.  */
1666   if (cilk_for_count)
1667     {
1668       t = build_decl (DECL_SOURCE_LOCATION (decl),
1669 		      PARM_DECL, get_identifier ("__high"), cilk_var_type);
1670       DECL_ARTIFICIAL (t) = 1;
1671       DECL_NAMELESS (t) = 1;
1672       DECL_ARG_TYPE (t) = ptr_type_node;
1673       DECL_CONTEXT (t) = current_function_decl;
1674       TREE_USED (t) = 1;
1675       DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
1676       DECL_ARGUMENTS (decl) = t;
1677 
1678       t = build_decl (DECL_SOURCE_LOCATION (decl),
1679 		      PARM_DECL, get_identifier ("__low"), cilk_var_type);
1680       DECL_ARTIFICIAL (t) = 1;
1681       DECL_NAMELESS (t) = 1;
1682       DECL_ARG_TYPE (t) = ptr_type_node;
1683       DECL_CONTEXT (t) = current_function_decl;
1684       TREE_USED (t) = 1;
1685       DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
1686       DECL_ARGUMENTS (decl) = t;
1687     }
1688 
1689   tree data_name = get_identifier (".omp_data_i");
1690   t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
1691 		  ptr_type_node);
1692   DECL_ARTIFICIAL (t) = 1;
1693   DECL_NAMELESS (t) = 1;
1694   DECL_ARG_TYPE (t) = ptr_type_node;
1695   DECL_CONTEXT (t) = current_function_decl;
1696   TREE_USED (t) = 1;
1697   TREE_READONLY (t) = 1;
1698   if (cilk_for_count)
1699     DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
1700   DECL_ARGUMENTS (decl) = t;
1701   if (!task_copy)
1702     ctx->receiver_decl = t;
1703   else
1704     {
1705       t = build_decl (DECL_SOURCE_LOCATION (decl),
1706 		      PARM_DECL, get_identifier (".omp_data_o"),
1707 		      ptr_type_node);
1708       DECL_ARTIFICIAL (t) = 1;
1709       DECL_NAMELESS (t) = 1;
1710       DECL_ARG_TYPE (t) = ptr_type_node;
1711       DECL_CONTEXT (t) = current_function_decl;
1712       TREE_USED (t) = 1;
1713       TREE_ADDRESSABLE (t) = 1;
1714       DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
1715       DECL_ARGUMENTS (decl) = t;
1716     }
1717 
1718   /* Allocate memory for the function structure.  The call to
1719      push_struct_function clobbers CFUN, so we need to restore
1720      it afterward with pop_cfun.  */
1721   push_struct_function (decl);
1722   cfun->function_end_locus = gimple_location (ctx->stmt);
1723   init_tree_ssa (cfun);
1724   pop_cfun ();
1725 }
1726 
1727 /* Callback for walk_gimple_seq.  Check whether a combined parallel
1728    contains an OMP_FOR that is gimple_omp_for_combined_into_p.  */
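/* The protocol, as used by add_taskreg_looptemp_clauses below: on entry
   WI->INFO points at the gf_mask loop kind to search for; when a match
   is found, WI->INFO is overwritten with the GIMPLE_OMP_FOR statement
   itself and integer_zero_node is returned to stop the walk.  */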
1729 
1730 tree
1731 omp_find_combined_for (gimple_stmt_iterator *gsi_p,
1732 		       bool *handled_ops_p,
1733 		       struct walk_stmt_info *wi)
1734 {
1735   gimple *stmt = gsi_stmt (*gsi_p);
1736 
1737   *handled_ops_p = true;
1738   switch (gimple_code (stmt))
1739     {
1740     WALK_SUBSTMTS;
1741 
1742     case GIMPLE_OMP_FOR:
1743       if (gimple_omp_for_combined_into_p (stmt)
1744 	  && gimple_omp_for_kind (stmt)
1745 	     == *(const enum gf_mask *) (wi->info))
1746 	{
1747 	  wi->info = stmt;
1748 	  return integer_zero_node;
1749 	}
1750       break;
1751     default:
1752       break;
1753     }
1754   return NULL;
1755 }
1756 
1757 /* Add _LOOPTEMP_ clauses on OpenMP parallel or task.  */
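/* As a sketch, a combined construct such as

     #pragma omp parallel for collapse(2) lastprivate(x)

   whose inner loop bound is not constant gets four _LOOPTEMP_ clauses
   added to the parallel: istart and iend, one temporary for the count
   of the second loop, and one more for the product of all iteration
   counts (needed for the lastprivate handling).  */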
1758 
1759 static void
1760 add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
1761 			      omp_context *outer_ctx)
1762 {
1763   struct walk_stmt_info wi;
1764 
1765   memset (&wi, 0, sizeof (wi));
1766   wi.val_only = true;
1767   wi.info = (void *) &msk;
1768   walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
1769   if (wi.info != (void *) &msk)
1770     {
1771       gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
1772       struct omp_for_data fd;
1773       omp_extract_for_data (for_stmt, &fd, NULL);
1774       /* We need two temporaries with fd.loop.v type (istart/iend)
1775 	 and then (fd.collapse - 1) temporaries with the same
1776 	 type for count2 ... countN-1 vars if not constant.  */
1777       size_t count = 2, i;
1778       tree type = fd.iter_type;
1779       if (fd.collapse > 1
1780 	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
1781 	{
1782 	  count += fd.collapse - 1;
1783 	  /* If there are lastprivate clauses on the inner
1784 	     GIMPLE_OMP_FOR, add one more temporary for the total number
1785 	     of iterations (product of count1 ... countN-1).  */
1786 	  if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
1787 			       OMP_CLAUSE_LASTPRIVATE))
1788 	    count++;
1789 	  else if (msk == GF_OMP_FOR_KIND_FOR
1790 		   && omp_find_clause (gimple_omp_parallel_clauses (stmt),
1791 				       OMP_CLAUSE_LASTPRIVATE))
1792 	    count++;
1793 	}
1794       for (i = 0; i < count; i++)
1795 	{
1796 	  tree temp = create_tmp_var (type);
1797 	  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
1798 	  insert_decl_map (&outer_ctx->cb, temp, temp);
1799 	  OMP_CLAUSE_DECL (c) = temp;
1800 	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1801 	  gimple_omp_taskreg_set_clauses (stmt, c);
1802 	}
1803     }
1804 }
1805 
1806 /* Scan an OpenMP parallel directive.  */
1807 
1808 static void
1809 scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
1810 {
1811   omp_context *ctx;
1812   tree name;
1813   gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));
1814 
1815   /* Ignore parallel directives with empty bodies, unless there
1816      are copyin clauses.  */
1817   if (optimize > 0
1818       && empty_body_p (gimple_omp_body (stmt))
1819       && omp_find_clause (gimple_omp_parallel_clauses (stmt),
1820 			  OMP_CLAUSE_COPYIN) == NULL)
1821     {
1822       gsi_replace (gsi, gimple_build_nop (), false);
1823       return;
1824     }
1825 
1826   if (gimple_omp_parallel_combined_p (stmt))
1827     add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
1828 
1829   ctx = new_omp_context (stmt, outer_ctx);
1830   taskreg_contexts.safe_push (ctx);
1831   if (taskreg_nesting_level > 1)
1832     ctx->is_nested = true;
1833   ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1834   ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
1835   ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1836   name = create_tmp_var_name (".omp_data_s");
1837   name = build_decl (gimple_location (stmt),
1838 		     TYPE_DECL, name, ctx->record_type);
1839   DECL_ARTIFICIAL (name) = 1;
1840   DECL_NAMELESS (name) = 1;
1841   TYPE_NAME (ctx->record_type) = name;
1842   TYPE_ARTIFICIAL (ctx->record_type) = 1;
1843   if (!gimple_omp_parallel_grid_phony (stmt))
1844     {
1845       create_omp_child_function (ctx, false);
1846       gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
1847     }
1848 
1849   scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
1850   scan_omp (gimple_omp_body_ptr (stmt), ctx);
1851 
1852   if (TYPE_FIELDS (ctx->record_type) == NULL)
1853     ctx->record_type = ctx->receiver_decl = NULL;
1854 }
1855 
1856 /* Scan an OpenMP task directive.  */
1857 
1858 static void
1859 scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
1860 {
1861   omp_context *ctx;
1862   tree name, t;
1863   gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
1864 
1865   /* Ignore task directives with empty bodies, unless they have a
1866      depend clause.  */
1867   if (optimize > 0
1868       && empty_body_p (gimple_omp_body (stmt))
1869       && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
1870     {
1871       gsi_replace (gsi, gimple_build_nop (), false);
1872       return;
1873     }
1874 
1875   if (gimple_omp_task_taskloop_p (stmt))
1876     add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
1877 
1878   ctx = new_omp_context (stmt, outer_ctx);
1879   taskreg_contexts.safe_push (ctx);
1880   if (taskreg_nesting_level > 1)
1881     ctx->is_nested = true;
1882   ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1883   ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
1884   ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1885   name = create_tmp_var_name (".omp_data_s");
1886   name = build_decl (gimple_location (stmt),
1887 		     TYPE_DECL, name, ctx->record_type);
1888   DECL_ARTIFICIAL (name) = 1;
1889   DECL_NAMELESS (name) = 1;
1890   TYPE_NAME (ctx->record_type) = name;
1891   TYPE_ARTIFICIAL (ctx->record_type) = 1;
1892   create_omp_child_function (ctx, false);
1893   gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
1894 
1895   scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
1896 
1897   if (ctx->srecord_type)
1898     {
1899       name = create_tmp_var_name (".omp_data_a");
1900       name = build_decl (gimple_location (stmt),
1901 			 TYPE_DECL, name, ctx->srecord_type);
1902       DECL_ARTIFICIAL (name) = 1;
1903       DECL_NAMELESS (name) = 1;
1904       TYPE_NAME (ctx->srecord_type) = name;
1905       TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
1906       create_omp_child_function (ctx, true);
1907     }
1908 
1909   scan_omp (gimple_omp_body_ptr (stmt), ctx);
1910 
1911   if (TYPE_FIELDS (ctx->record_type) == NULL)
1912     {
1913       ctx->record_type = ctx->receiver_decl = NULL;
1914       t = build_int_cst (long_integer_type_node, 0);
1915       gimple_omp_task_set_arg_size (stmt, t);
1916       t = build_int_cst (long_integer_type_node, 1);
1917       gimple_omp_task_set_arg_align (stmt, t);
1918     }
1919 }
1920 
1921 /* Helper function for finish_taskreg_scan, called through walk_tree.
1922    If maybe_lookup_decl_in_outer_ctx returns a different tree for
1923    some variable, replace it in the expression.  */
1924 
1925 static tree
1926 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
1927 {
1928   if (VAR_P (*tp))
1929     {
1930       omp_context *ctx = (omp_context *) data;
1931       tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
1932       if (t != *tp)
1933 	{
1934 	  if (DECL_HAS_VALUE_EXPR_P (t))
1935 	    t = unshare_expr (DECL_VALUE_EXPR (t));
1936 	  *tp = t;
1937 	}
1938       *walk_subtrees = 0;
1939     }
1940   else if (IS_TYPE_OR_DECL_P (*tp))
1941     *walk_subtrees = 0;
1942   return NULL_TREE;
1943 }
1944 
1945 /* If any decls have been made addressable during scan_omp,
1946    adjust their fields if needed, and layout record types
1947    of parallel/task constructs.  */
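/* For instance, if scan_omp made a shared scalar addressable (say its
   address is taken in a nested task), use_pointer_for_field now returns
   true for it, so the matching .omp_data_s field is retyped below from
   T to T * and the record's alignment is adjusted accordingly.  */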
1948 
1949 static void
1950 finish_taskreg_scan (omp_context *ctx)
1951 {
1952   if (ctx->record_type == NULL_TREE)
1953     return;
1954 
1955   /* If any task_shared_vars were needed, verify for all
1956      OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK}
1957      statements whether use_pointer_for_field has changed
1958      because of that.  If it did, update the field types now.  */
1959   if (task_shared_vars)
1960     {
1961       tree c;
1962 
1963       for (c = gimple_omp_taskreg_clauses (ctx->stmt);
1964 	   c; c = OMP_CLAUSE_CHAIN (c))
1965 	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1966 	    && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1967 	  {
1968 	    tree decl = OMP_CLAUSE_DECL (c);
1969 
1970 	    /* Global variables don't need to be copied,
1971 	       the receiver side will use them directly.  */
1972 	    if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1973 	      continue;
1974 	    if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
1975 		|| !use_pointer_for_field (decl, ctx))
1976 	      continue;
1977 	    tree field = lookup_field (decl, ctx);
1978 	    if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
1979 		&& TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
1980 	      continue;
1981 	    TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
1982 	    TREE_THIS_VOLATILE (field) = 0;
1983 	    DECL_USER_ALIGN (field) = 0;
1984 	    SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
1985 	    if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
1986 	      SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
1987 	    if (ctx->srecord_type)
1988 	      {
1989 		tree sfield = lookup_sfield (decl, ctx);
1990 		TREE_TYPE (sfield) = TREE_TYPE (field);
1991 		TREE_THIS_VOLATILE (sfield) = 0;
1992 		DECL_USER_ALIGN (sfield) = 0;
1993 		SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
1994 		if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
1995 		  SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
1996 	      }
1997 	  }
1998     }
1999 
2000   if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
2001     {
2002       layout_type (ctx->record_type);
2003       fixup_child_record_type (ctx);
2004     }
2005   else
2006     {
2007       location_t loc = gimple_location (ctx->stmt);
2008       tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
2009       /* Move VLA fields to the end.  */
2010       p = &TYPE_FIELDS (ctx->record_type);
2011       while (*p)
2012 	if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
2013 	    || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
2014 	  {
2015 	    *q = *p;
2016 	    *p = TREE_CHAIN (*p);
2017 	    TREE_CHAIN (*q) = NULL_TREE;
2018 	    q = &TREE_CHAIN (*q);
2019 	  }
2020 	else
2021 	  p = &DECL_CHAIN (*p);
2022       *p = vla_fields;
2023       if (gimple_omp_task_taskloop_p (ctx->stmt))
2024 	{
2025 	  /* Move the fields corresponding to the first and second _looptemp_
2026 	     clauses to the front.  These are filled in by GOMP_taskloop
2027 	     and thus need to be in specific positions.  */
2028 	  tree c1 = gimple_omp_task_clauses (ctx->stmt);
2029 	  c1 = omp_find_clause (c1, OMP_CLAUSE__LOOPTEMP_);
2030 	  tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
2031 				     OMP_CLAUSE__LOOPTEMP_);
2032 	  tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
2033 	  tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
2034 	  p = &TYPE_FIELDS (ctx->record_type);
2035 	  while (*p)
2036 	    if (*p == f1 || *p == f2)
2037 	      *p = DECL_CHAIN (*p);
2038 	    else
2039 	      p = &DECL_CHAIN (*p);
2040 	  DECL_CHAIN (f1) = f2;
2041 	  DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
2042 	  TYPE_FIELDS (ctx->record_type) = f1;
2043 	  if (ctx->srecord_type)
2044 	    {
2045 	      f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2046 	      f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
2047 	      p = &TYPE_FIELDS (ctx->srecord_type);
2048 	      while (*p)
2049 		if (*p == f1 || *p == f2)
2050 		  *p = DECL_CHAIN (*p);
2051 		else
2052 		  p = &DECL_CHAIN (*p);
2053 	      DECL_CHAIN (f1) = f2;
2054 	      DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2055 	      TYPE_FIELDS (ctx->srecord_type) = f1;
2056 	    }
2057 	}
2058       layout_type (ctx->record_type);
2059       fixup_child_record_type (ctx);
2060       if (ctx->srecord_type)
2061 	layout_type (ctx->srecord_type);
2062       tree t = fold_convert_loc (loc, long_integer_type_node,
2063 				 TYPE_SIZE_UNIT (ctx->record_type));
2064       if (TREE_CODE (t) != INTEGER_CST)
2065 	{
2066 	  t = unshare_expr (t);
2067 	  walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2068 	}
2069       gimple_omp_task_set_arg_size (ctx->stmt, t);
2070       t = build_int_cst (long_integer_type_node,
2071 			 TYPE_ALIGN_UNIT (ctx->record_type));
2072       gimple_omp_task_set_arg_align (ctx->stmt, t);
2073     }
2074 }
2075 
2076 /* Find the enclosing offload context.  */
2077 
2078 static omp_context *
2079 enclosing_target_ctx (omp_context *ctx)
2080 {
2081   for (; ctx; ctx = ctx->outer)
2082     if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2083       break;
2084 
2085   return ctx;
2086 }
2087 
2088 /* Return true if ctx is part of an oacc kernels region.  */
2089 
2090 static bool
2091 ctx_in_oacc_kernels_region (omp_context *ctx)
2092 {
2093   for (; ctx != NULL; ctx = ctx->outer)
2094     {
2095       gimple *stmt = ctx->stmt;
2096       if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2097 	  && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2098 	return true;
2099     }
2100 
2101   return false;
2102 }
2103 
2104 /* Check the parallelism clauses inside a kernels region.
2105    Until kernels handling moves to use the same loop indirection
2106    scheme as parallel, we need to do this checking early.  */
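/* For example (a sketch), repeating a parallelism level on a nested
   loop inside a kernels region is rejected:

     #pragma acc loop gang
     for (...)
       {
	 #pragma acc loop gang	// error: inner loop uses same OpenACC
	 for (...) ...		// parallelism as containing loop
       }
   */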
2107 
2108 static unsigned
2109 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2110 {
2111   bool checking = true;
2112   unsigned outer_mask = 0;
2113   unsigned this_mask = 0;
2114   bool has_seq = false, has_auto = false;
2115 
2116   if (ctx->outer)
2117     outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2118   if (!stmt)
2119     {
2120       checking = false;
2121       if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2122 	return outer_mask;
2123       stmt = as_a <gomp_for *> (ctx->stmt);
2124     }
2125 
2126   for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2127     {
2128       switch (OMP_CLAUSE_CODE (c))
2129 	{
2130 	case OMP_CLAUSE_GANG:
2131 	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2132 	  break;
2133 	case OMP_CLAUSE_WORKER:
2134 	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2135 	  break;
2136 	case OMP_CLAUSE_VECTOR:
2137 	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2138 	  break;
2139 	case OMP_CLAUSE_SEQ:
2140 	  has_seq = true;
2141 	  break;
2142 	case OMP_CLAUSE_AUTO:
2143 	  has_auto = true;
2144 	  break;
2145 	default:
2146 	  break;
2147 	}
2148     }
2149 
2150   if (checking)
2151     {
2152       if (has_seq && (this_mask || has_auto))
2153 	error_at (gimple_location (stmt), "%<seq%> overrides other"
2154 		  " OpenACC loop specifiers");
2155       else if (has_auto && this_mask)
2156 	error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2157 		  " OpenACC loop specifiers");
2158 
2159       if (this_mask & outer_mask)
2160 	error_at (gimple_location (stmt), "inner loop uses same"
2161 		  " OpenACC parallelism as containing loop");
2162     }
2163 
2164   return outer_mask | this_mask;
2165 }
2166 
2167 /* Scan a GIMPLE_OMP_FOR.  */
2168 
2169 static omp_context *
2170 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2171 {
2172   omp_context *ctx;
2173   size_t i;
2174   tree clauses = gimple_omp_for_clauses (stmt);
2175 
2176   ctx = new_omp_context (stmt, outer_ctx);
2177 
2178   if (is_gimple_omp_oacc (stmt))
2179     {
2180       omp_context *tgt = enclosing_target_ctx (outer_ctx);
2181 
2182       if (!tgt || is_oacc_parallel (tgt))
2183 	for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2184 	  {
2185 	    char const *check = NULL;
2186 
2187 	    switch (OMP_CLAUSE_CODE (c))
2188 	      {
2189 	      case OMP_CLAUSE_GANG:
2190 		check = "gang";
2191 		break;
2192 
2193 	      case OMP_CLAUSE_WORKER:
2194 		check = "worker";
2195 		break;
2196 
2197 	      case OMP_CLAUSE_VECTOR:
2198 		check = "vector";
2199 		break;
2200 
2201 	      default:
2202 		break;
2203 	      }
2204 
2205 	    if (check && OMP_CLAUSE_OPERAND (c, 0))
2206 	      error_at (gimple_location (stmt),
2207 			"argument not permitted on %qs clause in"
2208 			" OpenACC %<parallel%>", check);
2209 	  }
2210 
2211       if (tgt && is_oacc_kernels (tgt))
2212 	{
2213 	  /* Strip out reductions, as they are not handled yet.  */
2214 	  tree *prev_ptr = &clauses;
2215 
2216 	  while (tree probe = *prev_ptr)
2217 	    {
2218 	      tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2219 
2220 	      if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2221 		*prev_ptr = *next_ptr;
2222 	      else
2223 		prev_ptr = next_ptr;
2224 	    }
2225 
2226 	  gimple_omp_for_set_clauses (stmt, clauses);
2227 	  check_oacc_kernel_gwv (stmt, ctx);
2228 	}
2229     }
2230 
2231   scan_sharing_clauses (clauses, ctx);
2232 
2233   scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2234   for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2235     {
2236       scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2237       scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2238       scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2239       scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2240     }
2241   scan_omp (gimple_omp_body_ptr (stmt), ctx);
2242   return ctx;
2243 }
2244 
2245 /* Duplicate #pragma omp simd: one copy for SIMT, another for SIMD.  */
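/* A sketch of the sequence built below, with COND and LAB1..LAB3 as in
   the code:

     cond = GOMP_USE_SIMT ();
     if (cond != 0) goto lab1; else goto lab2;
     lab1: <copy of the loop, with a _simt_ clause prepended>  goto lab3;
     lab2: <the original simd loop>
     lab3:
   */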
2246 
2247 static void
2248 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2249 	       omp_context *outer_ctx)
2250 {
2251   gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2252   gsi_replace (gsi, bind, false);
2253   gimple_seq seq = NULL;
2254   gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2255   tree cond = create_tmp_var_raw (integer_type_node);
2256   DECL_CONTEXT (cond) = current_function_decl;
2257   DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2258   gimple_bind_set_vars (bind, cond);
2259   gimple_call_set_lhs (g, cond);
2260   gimple_seq_add_stmt (&seq, g);
2261   tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2262   tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2263   tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2264   g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2265   gimple_seq_add_stmt (&seq, g);
2266   g = gimple_build_label (lab1);
2267   gimple_seq_add_stmt (&seq, g);
2268   gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2269   gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2270   tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2271   OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2272   gimple_omp_for_set_clauses (new_stmt, clause);
2273   gimple_seq_add_stmt (&seq, new_stmt);
2274   g = gimple_build_goto (lab3);
2275   gimple_seq_add_stmt (&seq, g);
2276   g = gimple_build_label (lab2);
2277   gimple_seq_add_stmt (&seq, g);
2278   gimple_seq_add_stmt (&seq, stmt);
2279   g = gimple_build_label (lab3);
2280   gimple_seq_add_stmt (&seq, g);
2281   gimple_bind_set_body (bind, seq);
2282   update_stmt (bind);
2283   scan_omp_for (new_stmt, outer_ctx);
2284   scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
2285 }
2286 
2287 /* Scan an OpenMP sections directive.  */
2288 
2289 static void
2290 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2291 {
2292   omp_context *ctx;
2293 
2294   ctx = new_omp_context (stmt, outer_ctx);
2295   scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2296   scan_omp (gimple_omp_body_ptr (stmt), ctx);
2297 }
2298 
2299 /* Scan an OpenMP single directive.  */
2300 
2301 static void
2302 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2303 {
2304   omp_context *ctx;
2305   tree name;
2306 
2307   ctx = new_omp_context (stmt, outer_ctx);
2308   ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2309   ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2310   name = create_tmp_var_name (".omp_copy_s");
2311   name = build_decl (gimple_location (stmt),
2312 		     TYPE_DECL, name, ctx->record_type);
2313   TYPE_NAME (ctx->record_type) = name;
2314 
2315   scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2316   scan_omp (gimple_omp_body_ptr (stmt), ctx);
2317 
2318   if (TYPE_FIELDS (ctx->record_type) == NULL)
2319     ctx->record_type = NULL;
2320   else
2321     layout_type (ctx->record_type);
2322 }
2323 
2324 /* Return true if the CLAUSES of an omp target guarantee that the base pointers
2325    used in the corresponding offloaded function are restrict.  */
2326 
2327 static bool
2328 omp_target_base_pointers_restrict_p (tree clauses)
2329 {
2330   /* The analysis relies on the GOMP_MAP_FORCE_* mapping kinds, which are only
2331      used by OpenACC.  */
2332   if (flag_openacc == 0)
2333     return false;
2334 
2335   /* I.  Basic example:
2336 
2337        void foo (void)
2338        {
2339 	 unsigned int a[2], b[2];
2340 
2341 	 #pragma acc kernels \
2342 	   copyout (a) \
2343 	   copyout (b)
2344 	 {
2345 	   a[0] = 0;
2346 	   b[0] = 1;
2347 	 }
2348        }
2349 
2350      After gimplification, we have:
2351 
2352        #pragma omp target oacc_kernels \
2353 	 map(force_from:a [len: 8]) \
2354 	 map(force_from:b [len: 8])
2355        {
2356 	 a[0] = 0;
2357 	 b[0] = 1;
2358        }
2359 
2360      Because both mappings have the force prefix, we know that they will be
2361      allocated when calling the corresponding offloaded function, which means we
2362      can mark the base pointers for a and b in the offloaded function as
2363      restrict.  */
2364 
2365   tree c;
2366   for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2367     {
2368       if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
2369 	return false;
2370 
2371       switch (OMP_CLAUSE_MAP_KIND (c))
2372 	{
2373 	case GOMP_MAP_FORCE_ALLOC:
2374 	case GOMP_MAP_FORCE_TO:
2375 	case GOMP_MAP_FORCE_FROM:
2376 	case GOMP_MAP_FORCE_TOFROM:
2377 	  break;
2378 	default:
2379 	  return false;
2380 	}
2381     }
2382 
2383   return true;
2384 }
2385 
2386 /* Scan a GIMPLE_OMP_TARGET.  */
2387 
2388 static void
2389 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
2390 {
2391   omp_context *ctx;
2392   tree name;
2393   bool offloaded = is_gimple_omp_offloaded (stmt);
2394   tree clauses = gimple_omp_target_clauses (stmt);
2395 
2396   ctx = new_omp_context (stmt, outer_ctx);
2397   ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2398   ctx->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
2399   ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2400   name = create_tmp_var_name (".omp_data_t");
2401   name = build_decl (gimple_location (stmt),
2402 		     TYPE_DECL, name, ctx->record_type);
2403   DECL_ARTIFICIAL (name) = 1;
2404   DECL_NAMELESS (name) = 1;
2405   TYPE_NAME (ctx->record_type) = name;
2406   TYPE_ARTIFICIAL (ctx->record_type) = 1;
2407 
2408   bool base_pointers_restrict = false;
2409   if (offloaded)
2410     {
2411       create_omp_child_function (ctx, false);
2412       gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2413 
2414       base_pointers_restrict = omp_target_base_pointers_restrict_p (clauses);
2415       if (base_pointers_restrict
2416 	  && dump_file && (dump_flags & TDF_DETAILS))
2417 	fprintf (dump_file,
2418 		 "Base pointers in offloaded function are restrict\n");
2419     }
2420 
2421   scan_sharing_clauses (clauses, ctx, base_pointers_restrict);
2422   scan_omp (gimple_omp_body_ptr (stmt), ctx);
2423 
2424   if (TYPE_FIELDS (ctx->record_type) == NULL)
2425     ctx->record_type = ctx->receiver_decl = NULL;
2426   else
2427     {
2428       TYPE_FIELDS (ctx->record_type)
2429 	= nreverse (TYPE_FIELDS (ctx->record_type));
2430       if (flag_checking)
2431 	{
2432 	  unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2433 	  for (tree field = TYPE_FIELDS (ctx->record_type);
2434 	       field;
2435 	       field = DECL_CHAIN (field))
2436 	    gcc_assert (DECL_ALIGN (field) == align);
2437 	}
2438       layout_type (ctx->record_type);
2439       if (offloaded)
2440 	fixup_child_record_type (ctx);
2441     }
2442 }
2443 
2444 /* Scan an OpenMP teams directive.  */
2445 
2446 static void
2447 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
2448 {
2449   omp_context *ctx = new_omp_context (stmt, outer_ctx);
2450   scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2451   scan_omp (gimple_omp_body_ptr (stmt), ctx);
2452 }
2453 
2454 /* Check nesting restrictions.  */
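/* For example, closely nesting one worksharing region in another is
   invalid and is diagnosed below:

     #pragma omp for
     for (i = 0; i < n; i++)
       {
	 #pragma omp single	// error: work-sharing region may not be
	 f (i);			// closely nested inside ...
       }
   */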
2455 static bool
2456 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
2457 {
2458   tree c;
2459 
2460   if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
2461     /* GRID_BODY is an artificial construct; nesting rules will be checked in
2462        the original copy of its contents.  */
2463     return true;
2464 
2465   /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2466      inside an OpenACC CTX.  */
2467   if (!(is_gimple_omp (stmt)
2468 	&& is_gimple_omp_oacc (stmt))
2469       /* Except for atomic codes that we share with OpenMP.  */
2470       && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2471 	   || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2472     {
2473       if (oacc_get_fn_attrib (cfun->decl) != NULL)
2474 	{
2475 	  error_at (gimple_location (stmt),
2476 		    "non-OpenACC construct inside of OpenACC routine");
2477 	  return false;
2478 	}
2479       else
2480 	for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2481 	  if (is_gimple_omp (octx->stmt)
2482 	      && is_gimple_omp_oacc (octx->stmt))
2483 	    {
2484 	      error_at (gimple_location (stmt),
2485 			"non-OpenACC construct inside of OpenACC region");
2486 	      return false;
2487 	    }
2488     }
2489 
2490   if (ctx != NULL)
2491     {
2492       if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2493 	  && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
2494 	{
2495 	  c = NULL_TREE;
2496 	  if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2497 	    {
2498 	      c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2499 	      if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2500 		{
2501 		  if (omp_find_clause (c, OMP_CLAUSE_THREADS)
2502 		      && (ctx->outer == NULL
2503 			  || !gimple_omp_for_combined_into_p (ctx->stmt)
2504 			  || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2505 			  || (gimple_omp_for_kind (ctx->outer->stmt)
2506 			      != GF_OMP_FOR_KIND_FOR)
2507 			  || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2508 		    {
2509 		      error_at (gimple_location (stmt),
2510 				"%<ordered simd threads%> must be closely "
2511 				"nested inside of %<for simd%> region");
2512 		      return false;
2513 		    }
2514 		  return true;
2515 		}
2516 	    }
2517 	  error_at (gimple_location (stmt),
2518 		    "OpenMP constructs other than %<#pragma omp ordered simd%>"
2519 		    " may not be nested inside %<simd%> region");
2520 	  return false;
2521 	}
2522       else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2523 	{
2524 	  if ((gimple_code (stmt) != GIMPLE_OMP_FOR
2525 	       || ((gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE)
2526 		   && (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP)))
2527 	      && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2528 	    {
2529 	      error_at (gimple_location (stmt),
2530 			"only %<distribute%> or %<parallel%> regions are "
2531 			"allowed to be strictly nested inside %<teams%> "
2532 			"region");
2533 	      return false;
2534 	    }
2535 	}
2536     }
2537   switch (gimple_code (stmt))
2538     {
2539     case GIMPLE_OMP_FOR:
2540       if (gimple_omp_for_kind (stmt) & GF_OMP_FOR_SIMD)
2541 	return true;
2542       if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2543 	{
2544 	  if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2545 	    {
2546 	      error_at (gimple_location (stmt),
2547 			"%<distribute%> region must be strictly nested "
2548 			"inside %<teams%> construct");
2549 	      return false;
2550 	    }
2551 	  return true;
2552 	}
2553       /* We split taskloop into a task with a nested taskloop inside it.  */
2554       if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2555 	return true;
2556       if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
2557 	{
2558 	  bool ok = false;
2559 
2560 	  if (ctx)
2561 	    switch (gimple_code (ctx->stmt))
2562 	      {
2563 	      case GIMPLE_OMP_FOR:
2564 		ok = (gimple_omp_for_kind (ctx->stmt)
2565 		      == GF_OMP_FOR_KIND_OACC_LOOP);
2566 		break;
2567 
2568 	      case GIMPLE_OMP_TARGET:
2569 		switch (gimple_omp_target_kind (ctx->stmt))
2570 		  {
2571 		  case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2572 		  case GF_OMP_TARGET_KIND_OACC_KERNELS:
2573 		    ok = true;
2574 		    break;
2575 
2576 		  default:
2577 		    break;
2578 		  }
2579 
2580 	      default:
2581 		break;
2582 	      }
2583 	  else if (oacc_get_fn_attrib (current_function_decl))
2584 	    ok = true;
2585 	  if (!ok)
2586 	    {
2587 	      error_at (gimple_location (stmt),
2588 			"OpenACC loop directive must be associated with"
2589 			" an OpenACC compute region");
2590 	      return false;
2591 	    }
2592 	}
2593       /* FALLTHRU */
2594     case GIMPLE_CALL:
2595       if (is_gimple_call (stmt)
2596 	  && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2597 	      == BUILT_IN_GOMP_CANCEL
2598 	      || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2599 		 == BUILT_IN_GOMP_CANCELLATION_POINT))
2600 	{
2601 	  const char *bad = NULL;
2602 	  const char *kind = NULL;
2603 	  const char *construct
2604 	    = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2605 	       == BUILT_IN_GOMP_CANCEL)
2606 	      ? "#pragma omp cancel"
2607 	      : "#pragma omp cancellation point";
2608 	  if (ctx == NULL)
2609 	    {
2610 	      error_at (gimple_location (stmt), "orphaned %qs construct",
2611 			construct);
2612 	      return false;
2613 	    }
2614 	  switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
2615 		  ? tree_to_shwi (gimple_call_arg (stmt, 0))
2616 		  : 0)
2617 	    {
2618 	    case 1:
2619 	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
2620 		bad = "#pragma omp parallel";
2621 	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2622 		       == BUILT_IN_GOMP_CANCEL
2623 		       && !integer_zerop (gimple_call_arg (stmt, 1)))
2624 		ctx->cancellable = true;
2625 	      kind = "parallel";
2626 	      break;
2627 	    case 2:
2628 	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2629 		  || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
2630 		bad = "#pragma omp for";
2631 	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2632 		       == BUILT_IN_GOMP_CANCEL
2633 		       && !integer_zerop (gimple_call_arg (stmt, 1)))
2634 		{
2635 		  ctx->cancellable = true;
2636 		  if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2637 				       OMP_CLAUSE_NOWAIT))
2638 		    warning_at (gimple_location (stmt), 0,
2639 				"%<#pragma omp cancel for%> inside "
2640 				"%<nowait%> for construct");
2641 		  if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2642 				       OMP_CLAUSE_ORDERED))
2643 		    warning_at (gimple_location (stmt), 0,
2644 				"%<#pragma omp cancel for%> inside "
2645 				"%<ordered%> for construct");
2646 		}
2647 	      kind = "for";
2648 	      break;
2649 	    case 4:
2650 	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
2651 		  && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
2652 		bad = "#pragma omp sections";
2653 	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2654 		       == BUILT_IN_GOMP_CANCEL
2655 		       && !integer_zerop (gimple_call_arg (stmt, 1)))
2656 		{
2657 		  if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
2658 		    {
2659 		      ctx->cancellable = true;
2660 		      if (omp_find_clause (gimple_omp_sections_clauses
2661 								(ctx->stmt),
2662 					   OMP_CLAUSE_NOWAIT))
2663 			warning_at (gimple_location (stmt), 0,
2664 				    "%<#pragma omp cancel sections%> inside "
2665 				    "%<nowait%> sections construct");
2666 		    }
2667 		  else
2668 		    {
2669 		      gcc_assert (ctx->outer
2670 				  && gimple_code (ctx->outer->stmt)
2671 				     == GIMPLE_OMP_SECTIONS);
2672 		      ctx->outer->cancellable = true;
2673 		      if (omp_find_clause (gimple_omp_sections_clauses
2674 							(ctx->outer->stmt),
2675 					   OMP_CLAUSE_NOWAIT))
2676 			warning_at (gimple_location (stmt), 0,
2677 				    "%<#pragma omp cancel sections%> inside "
2678 				    "%<nowait%> sections construct");
2679 		    }
2680 		}
2681 	      kind = "sections";
2682 	      break;
2683 	    case 8:
2684 	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_TASK)
2685 		bad = "#pragma omp task";
2686 	      else
2687 		{
2688 		  for (omp_context *octx = ctx->outer;
2689 		       octx; octx = octx->outer)
2690 		    {
2691 		      switch (gimple_code (octx->stmt))
2692 			{
2693 			case GIMPLE_OMP_TASKGROUP:
2694 			  break;
2695 			case GIMPLE_OMP_TARGET:
2696 			  if (gimple_omp_target_kind (octx->stmt)
2697 			      != GF_OMP_TARGET_KIND_REGION)
2698 			    continue;
2699 			  /* FALLTHRU */
2700 			case GIMPLE_OMP_PARALLEL:
2701 			case GIMPLE_OMP_TEAMS:
2702 			  error_at (gimple_location (stmt),
2703 				    "%<%s taskgroup%> construct not closely "
2704 				    "nested inside of %<taskgroup%> region",
2705 				    construct);
2706 			  return false;
2707 			default:
2708 			  continue;
2709 			}
2710 		      break;
2711 		    }
2712 		  ctx->cancellable = true;
2713 		}
2714 	      kind = "taskgroup";
2715 	      break;
2716 	    default:
2717 	      error_at (gimple_location (stmt), "invalid arguments");
2718 	      return false;
2719 	    }
2720 	  if (bad)
2721 	    {
2722 	      error_at (gimple_location (stmt),
2723 			"%<%s %s%> construct not closely nested inside of %qs",
2724 			construct, kind, bad);
2725 	      return false;
2726 	    }
2727 	}
2728       /* FALLTHRU */
2729     case GIMPLE_OMP_SECTIONS:
2730     case GIMPLE_OMP_SINGLE:
2731       for (; ctx != NULL; ctx = ctx->outer)
2732 	switch (gimple_code (ctx->stmt))
2733 	  {
2734 	  case GIMPLE_OMP_FOR:
2735 	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2736 		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2737 	      break;
2738 	    /* FALLTHRU */
2739 	  case GIMPLE_OMP_SECTIONS:
2740 	  case GIMPLE_OMP_SINGLE:
2741 	  case GIMPLE_OMP_ORDERED:
2742 	  case GIMPLE_OMP_MASTER:
2743 	  case GIMPLE_OMP_TASK:
2744 	  case GIMPLE_OMP_CRITICAL:
2745 	    if (is_gimple_call (stmt))
2746 	      {
2747 		if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2748 		    != BUILT_IN_GOMP_BARRIER)
2749 		  return true;
2750 		error_at (gimple_location (stmt),
2751 			  "barrier region may not be closely nested inside "
2752 			  "of work-sharing, %<critical%>, %<ordered%>, "
2753 			  "%<master%>, explicit %<task%> or %<taskloop%> "
2754 			  "region");
2755 		return false;
2756 	      }
2757 	    error_at (gimple_location (stmt),
2758 		      "work-sharing region may not be closely nested inside "
2759 		      "of work-sharing, %<critical%>, %<ordered%>, "
2760 		      "%<master%>, explicit %<task%> or %<taskloop%> region");
2761 	    return false;
2762 	  case GIMPLE_OMP_PARALLEL:
2763 	  case GIMPLE_OMP_TEAMS:
2764 	    return true;
2765 	  case GIMPLE_OMP_TARGET:
2766 	    if (gimple_omp_target_kind (ctx->stmt)
2767 		== GF_OMP_TARGET_KIND_REGION)
2768 	      return true;
2769 	    break;
2770 	  default:
2771 	    break;
2772 	  }
2773       break;
2774     case GIMPLE_OMP_MASTER:
2775       for (; ctx != NULL; ctx = ctx->outer)
2776 	switch (gimple_code (ctx->stmt))
2777 	  {
2778 	  case GIMPLE_OMP_FOR:
2779 	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2780 		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2781 	      break;
2782 	    /* FALLTHRU */
2783 	  case GIMPLE_OMP_SECTIONS:
2784 	  case GIMPLE_OMP_SINGLE:
2785 	  case GIMPLE_OMP_TASK:
2786 	    error_at (gimple_location (stmt),
2787 		      "%<master%> region may not be closely nested inside "
2788 		      "of work-sharing, explicit %<task%> or %<taskloop%> "
2789 		      "region");
2790 	    return false;
2791 	  case GIMPLE_OMP_PARALLEL:
2792 	  case GIMPLE_OMP_TEAMS:
2793 	    return true;
2794 	  case GIMPLE_OMP_TARGET:
2795 	    if (gimple_omp_target_kind (ctx->stmt)
2796 		== GF_OMP_TARGET_KIND_REGION)
2797 	      return true;
2798 	    break;
2799 	  default:
2800 	    break;
2801 	  }
2802       break;
2803     case GIMPLE_OMP_TASK:
2804       for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2805 	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2806 	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2807 		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2808 	  {
2809 	    enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2810 	    error_at (OMP_CLAUSE_LOCATION (c),
2811 		      "%<depend(%s)%> is only allowed in %<omp ordered%>",
2812 		      kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2813 	    return false;
2814 	  }
2815       break;
2816     case GIMPLE_OMP_ORDERED:
2817       for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2818 	   c; c = OMP_CLAUSE_CHAIN (c))
2819 	{
2820 	  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
2821 	    {
2822 	      gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
2823 			  || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
2824 	      continue;
2825 	    }
2826 	  enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2827 	  if (kind == OMP_CLAUSE_DEPEND_SOURCE
2828 	      || kind == OMP_CLAUSE_DEPEND_SINK)
2829 	    {
2830 	      tree oclause;
2831 	      /* Look for containing ordered(N) loop.  */
2832 	      if (ctx == NULL
2833 		  || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2834 		  || (oclause
2835 			= omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2836 					   OMP_CLAUSE_ORDERED)) == NULL_TREE)
2837 		{
2838 		  error_at (OMP_CLAUSE_LOCATION (c),
2839 			    "%<ordered%> construct with %<depend%> clause "
2840 			    "must be closely nested inside an %<ordered%> "
2841 			    "loop");
2842 		  return false;
2843 		}
2844 	      else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
2845 		{
2846 		  error_at (OMP_CLAUSE_LOCATION (c),
2847 			    "%<ordered%> construct with %<depend%> clause "
2848 			    "must be closely nested inside a loop with "
2849 			    "%<ordered%> clause with a parameter");
2850 		  return false;
2851 		}
2852 	    }
2853 	  else
2854 	    {
2855 	      error_at (OMP_CLAUSE_LOCATION (c),
2856 			"invalid depend kind in omp %<ordered%> %<depend%>");
2857 	      return false;
2858 	    }
2859 	}
2860       c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2861       if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2862 	{
2863 	  /* ordered simd must be closely nested inside of simd region,
2864 	     and simd region must not encounter constructs other than
2865 	     ordered simd, therefore ordered simd may be either orphaned,
2866 	     or ctx->stmt must be simd.  The latter case was already handled
2867 	     earlier.  */
2868 	  if (ctx != NULL)
2869 	    {
2870 	      error_at (gimple_location (stmt),
2871 			"%<ordered%> %<simd%> must be closely nested inside "
2872 			"%<simd%> region");
2873 	      return false;
2874 	    }
2875 	}
2876       for (; ctx != NULL; ctx = ctx->outer)
2877 	switch (gimple_code (ctx->stmt))
2878 	  {
2879 	  case GIMPLE_OMP_CRITICAL:
2880 	  case GIMPLE_OMP_TASK:
2881 	  case GIMPLE_OMP_ORDERED:
2882 	  ordered_in_taskloop:
2883 	    error_at (gimple_location (stmt),
2884 		      "%<ordered%> region may not be closely nested inside "
2885 		      "of %<critical%>, %<ordered%>, explicit %<task%> or "
2886 		      "%<taskloop%> region");
2887 	    return false;
2888 	  case GIMPLE_OMP_FOR:
2889 	    if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2890 	      goto ordered_in_taskloop;
2891 	    if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2892 				 OMP_CLAUSE_ORDERED) == NULL)
2893 	      {
2894 		error_at (gimple_location (stmt),
2895 			  "%<ordered%> region must be closely nested inside "
2896 			  "a loop region with an %<ordered%> clause");
2897 		return false;
2898 	      }
2899 	    return true;
2900 	  case GIMPLE_OMP_TARGET:
2901 	    if (gimple_omp_target_kind (ctx->stmt)
2902 		!= GF_OMP_TARGET_KIND_REGION)
2903 	      break;
2904 	    /* FALLTHRU */
2905 	  case GIMPLE_OMP_PARALLEL:
2906 	  case GIMPLE_OMP_TEAMS:
2907 	    error_at (gimple_location (stmt),
2908 		      "%<ordered%> region must be closely nested inside "
2909 		      "a loop region with an %<ordered%> clause");
2910 	    return false;
2911 	  default:
2912 	    break;
2913 	  }
2914       break;
2915     case GIMPLE_OMP_CRITICAL:
2916       {
2917 	tree this_stmt_name
2918 	  = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
2919 	for (; ctx != NULL; ctx = ctx->outer)
2920 	  if (gomp_critical *other_crit
2921 	        = dyn_cast <gomp_critical *> (ctx->stmt))
2922 	    if (this_stmt_name == gimple_omp_critical_name (other_crit))
2923 	      {
2924 		error_at (gimple_location (stmt),
2925 			  "%<critical%> region may not be nested inside "
2926 			   "a %<critical%> region with the same name");
2927 		return false;
2928 	      }
2929       }
2930       break;
2931     case GIMPLE_OMP_TEAMS:
2932       if (ctx == NULL
2933 	  || gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
2934 	  || gimple_omp_target_kind (ctx->stmt) != GF_OMP_TARGET_KIND_REGION)
2935 	{
2936 	  error_at (gimple_location (stmt),
2937 		    "%<teams%> construct not closely nested inside of "
2938 		    "%<target%> construct");
2939 	  return false;
2940 	}
2941       break;
2942     case GIMPLE_OMP_TARGET:
2943       for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2944 	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2945 	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2946 		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2947 	  {
2948 	    enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2949 	    error_at (OMP_CLAUSE_LOCATION (c),
2950 		      "%<depend(%s)%> is only allowed in %<omp ordered%>",
2951 		      kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2952 	    return false;
2953 	  }
2954       if (is_gimple_omp_offloaded (stmt)
2955 	  && oacc_get_fn_attrib (cfun->decl) != NULL)
2956 	{
2957 	  error_at (gimple_location (stmt),
2958 		    "OpenACC region inside of OpenACC routine, nested "
2959 		    "parallelism not supported yet");
2960 	  return false;
2961 	}
2962       for (; ctx != NULL; ctx = ctx->outer)
2963 	{
2964 	  if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
2965 	    {
2966 	      if (is_gimple_omp (stmt)
2967 		  && is_gimple_omp_oacc (stmt)
2968 		  && is_gimple_omp (ctx->stmt))
2969 		{
2970 		  error_at (gimple_location (stmt),
2971 			    "OpenACC construct inside of non-OpenACC region");
2972 		  return false;
2973 		}
2974 	      continue;
2975 	    }
2976 
2977 	  const char *stmt_name, *ctx_stmt_name;
2978 	  switch (gimple_omp_target_kind (stmt))
2979 	    {
2980 	    case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
2981 	    case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
2982 	    case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
2983 	    case GF_OMP_TARGET_KIND_ENTER_DATA:
2984 	      stmt_name = "target enter data"; break;
2985 	    case GF_OMP_TARGET_KIND_EXIT_DATA:
2986 	      stmt_name = "target exit data"; break;
2987 	    case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
2988 	    case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
2989 	    case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
2990 	    case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
2991 	    case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
2992 	      stmt_name = "enter/exit data"; break;
2993 	    case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
2994 	      break;
2995 	    default: gcc_unreachable ();
2996 	    }
2997 	  switch (gimple_omp_target_kind (ctx->stmt))
2998 	    {
2999 	    case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
3000 	    case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
3001 	    case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3002 	      ctx_stmt_name = "parallel"; break;
3003 	    case GF_OMP_TARGET_KIND_OACC_KERNELS:
3004 	      ctx_stmt_name = "kernels"; break;
3005 	    case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
3006 	    case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
3007 	      ctx_stmt_name = "host_data"; break;
3008 	    default: gcc_unreachable ();
3009 	    }
3010 
3011 	  /* OpenACC/OpenMP mismatch?  */
3012 	  if (is_gimple_omp_oacc (stmt)
3013 	      != is_gimple_omp_oacc (ctx->stmt))
3014 	    {
3015 	      error_at (gimple_location (stmt),
3016 			"%s %qs construct inside of %s %qs region",
3017 			(is_gimple_omp_oacc (stmt)
3018 			 ? "OpenACC" : "OpenMP"), stmt_name,
3019 			(is_gimple_omp_oacc (ctx->stmt)
3020 			 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
3021 	      return false;
3022 	    }
3023 	  if (is_gimple_omp_offloaded (ctx->stmt))
3024 	    {
3025 	      /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX.  */
3026 	      if (is_gimple_omp_oacc (ctx->stmt))
3027 		{
3028 		  error_at (gimple_location (stmt),
3029 			    "%qs construct inside of %qs region",
3030 			    stmt_name, ctx_stmt_name);
3031 		  return false;
3032 		}
3033 	      else
3034 		{
3035 		  warning_at (gimple_location (stmt), 0,
3036 			      "%qs construct inside of %qs region",
3037 			      stmt_name, ctx_stmt_name);
3038 		}
3039 	    }
3040 	}
3041       break;
3042     default:
3043       break;
3044     }
3045   return true;
3046 }
3047 
3048 
3049 /* Helper function for scan_omp.
3050 
3051    Callback for walk_tree or operators in walk_gimple_stmt used to
3052    scan for OMP directives in TP.  */
3053 
3054 static tree
3055 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3056 {
3057   struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3058   omp_context *ctx = (omp_context *) wi->info;
3059   tree t = *tp;
3060 
3061   switch (TREE_CODE (t))
3062     {
3063     case VAR_DECL:
3064     case PARM_DECL:
3065     case LABEL_DECL:
3066     case RESULT_DECL:
3067       if (ctx)
3068 	{
3069 	  tree repl = remap_decl (t, &ctx->cb);
3070 	  gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3071 	  *tp = repl;
3072 	}
3073       break;
3074 
3075     default:
3076       if (ctx && TYPE_P (t))
3077 	*tp = remap_type (t, &ctx->cb);
3078       else if (!DECL_P (t))
3079 	{
3080 	  *walk_subtrees = 1;
3081 	  if (ctx)
3082 	    {
3083 	      tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3084 	      if (tem != TREE_TYPE (t))
3085 		{
3086 		  if (TREE_CODE (t) == INTEGER_CST)
3087 		    *tp = wide_int_to_tree (tem, t);
3088 		  else
3089 		    TREE_TYPE (t) = tem;
3090 		}
3091 	    }
3092 	}
3093       break;
3094     }
3095 
3096   return NULL_TREE;
3097 }
3098 
3099 /* Return true if FNDECL is a setjmp or a longjmp.  */
3100 
3101 static bool
3102 setjmp_or_longjmp_p (const_tree fndecl)
3103 {
3104   if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
3105       && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SETJMP
3106 	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LONGJMP))
3107     return true;
3108 
3109   tree declname = DECL_NAME (fndecl);
3110   if (!declname)
3111     return false;
3112   const char *name = IDENTIFIER_POINTER (declname);
3113   return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3114 }
3115 
3116 
3117 /* Helper function for scan_omp.
3118 
3119    Callback for walk_gimple_stmt used to scan for OMP directives in
3120    the current statement in GSI.  */
3121 
3122 static tree
3123 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3124 		 struct walk_stmt_info *wi)
3125 {
3126   gimple *stmt = gsi_stmt (*gsi);
3127   omp_context *ctx = (omp_context *) wi->info;
3128 
3129   if (gimple_has_location (stmt))
3130     input_location = gimple_location (stmt);
3131 
3132   /* Check the nesting restrictions.  */
3133   bool remove = false;
3134   if (is_gimple_omp (stmt))
3135     remove = !check_omp_nesting_restrictions (stmt, ctx);
3136   else if (is_gimple_call (stmt))
3137     {
3138       tree fndecl = gimple_call_fndecl (stmt);
3139       if (fndecl)
3140 	{
3141 	  if (setjmp_or_longjmp_p (fndecl)
3142 	      && ctx
3143 	      && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3144 	      && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
3145 	    {
3146 	      remove = true;
3147 	      error_at (gimple_location (stmt),
3148 			"setjmp/longjmp inside simd construct");
3149 	    }
3150 	  else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3151 	    switch (DECL_FUNCTION_CODE (fndecl))
3152 	      {
3153 	      case BUILT_IN_GOMP_BARRIER:
3154 	      case BUILT_IN_GOMP_CANCEL:
3155 	      case BUILT_IN_GOMP_CANCELLATION_POINT:
3156 	      case BUILT_IN_GOMP_TASKYIELD:
3157 	      case BUILT_IN_GOMP_TASKWAIT:
3158 	      case BUILT_IN_GOMP_TASKGROUP_START:
3159 	      case BUILT_IN_GOMP_TASKGROUP_END:
3160 		remove = !check_omp_nesting_restrictions (stmt, ctx);
3161 		break;
3162 	      default:
3163 		break;
3164 	      }
3165 	}
3166     }
3167   if (remove)
3168     {
3169       stmt = gimple_build_nop ();
3170       gsi_replace (gsi, stmt, false);
3171     }
3172 
3173   *handled_ops_p = true;
3174 
3175   switch (gimple_code (stmt))
3176     {
3177     case GIMPLE_OMP_PARALLEL:
3178       taskreg_nesting_level++;
3179       scan_omp_parallel (gsi, ctx);
3180       taskreg_nesting_level--;
3181       break;
3182 
3183     case GIMPLE_OMP_TASK:
3184       taskreg_nesting_level++;
3185       scan_omp_task (gsi, ctx);
3186       taskreg_nesting_level--;
3187       break;
3188 
3189     case GIMPLE_OMP_FOR:
3190       if (((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3191 	    & GF_OMP_FOR_KIND_MASK) == GF_OMP_FOR_KIND_SIMD)
3192 	  && omp_maybe_offloaded_ctx (ctx)
3193 	  && omp_max_simt_vf ())
3194 	scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
3195       else
3196 	scan_omp_for (as_a <gomp_for *> (stmt), ctx);
3197       break;
3198 
3199     case GIMPLE_OMP_SECTIONS:
3200       scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
3201       break;
3202 
3203     case GIMPLE_OMP_SINGLE:
3204       scan_omp_single (as_a <gomp_single *> (stmt), ctx);
3205       break;
3206 
3207     case GIMPLE_OMP_SECTION:
3208     case GIMPLE_OMP_MASTER:
3209     case GIMPLE_OMP_TASKGROUP:
3210     case GIMPLE_OMP_ORDERED:
3211     case GIMPLE_OMP_CRITICAL:
3212     case GIMPLE_OMP_GRID_BODY:
3213       ctx = new_omp_context (stmt, ctx);
3214       scan_omp (gimple_omp_body_ptr (stmt), ctx);
3215       break;
3216 
3217     case GIMPLE_OMP_TARGET:
3218       scan_omp_target (as_a <gomp_target *> (stmt), ctx);
3219       break;
3220 
3221     case GIMPLE_OMP_TEAMS:
3222       scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3223       break;
3224 
3225     case GIMPLE_BIND:
3226       {
3227 	tree var;
3228 
3229 	*handled_ops_p = false;
3230 	if (ctx)
3231 	  for (var = gimple_bind_vars (as_a <gbind *> (stmt));
3232 	       var ;
3233 	       var = DECL_CHAIN (var))
3234 	    insert_decl_map (&ctx->cb, var, var);
3235       }
3236       break;
3237     default:
3238       *handled_ops_p = false;
3239       break;
3240     }
3241 
3242   return NULL_TREE;
3243 }
3244 
3245 
3246 /* Scan all the statements starting at the current statement.  CTX
3247    contains context information about the OMP directives and
3248    clauses found during the scan.  */
3249 
3250 static void
3251 scan_omp (gimple_seq *body_p, omp_context *ctx)
3252 {
3253   location_t saved_location;
3254   struct walk_stmt_info wi;
3255 
3256   memset (&wi, 0, sizeof (wi));
3257   wi.info = ctx;
3258   wi.want_locations = true;
3259 
3260   saved_location = input_location;
3261   walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
3262   input_location = saved_location;
3263 }
3264 
3265 /* Re-gimplification and code generation routines.  */
3266 
3267 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
3268    of BIND if in a method.  */
3269 
3270 static void
3271 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
3272 {
3273   if (DECL_ARGUMENTS (current_function_decl)
3274       && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
3275       && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
3276 	  == POINTER_TYPE))
3277     {
3278       tree vars = gimple_bind_vars (bind);
3279       for (tree *pvar = &vars; *pvar; )
3280 	if (omp_member_access_dummy_var (*pvar))
3281 	  *pvar = DECL_CHAIN (*pvar);
3282 	else
3283 	  pvar = &DECL_CHAIN (*pvar);
3284       gimple_bind_set_vars (bind, vars);
3285     }
3286 }
3287 
3288 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
3289    block and its subblocks.  */
3290 
3291 static void
3292 remove_member_access_dummy_vars (tree block)
3293 {
3294   for (tree *pvar = &BLOCK_VARS (block); *pvar; )
3295     if (omp_member_access_dummy_var (*pvar))
3296       *pvar = DECL_CHAIN (*pvar);
3297     else
3298       pvar = &DECL_CHAIN (*pvar);
3299 
3300   for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
3301     remove_member_access_dummy_vars (block);
3302 }
3303 
3304 /* If a context was created for STMT when it was scanned, return it.  */
3305 
3306 static omp_context *
3307 maybe_lookup_ctx (gimple *stmt)
3308 {
3309   splay_tree_node n;
3310   n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3311   return n ? (omp_context *) n->value : NULL;
3312 }
3313 
3314 
3315 /* Find the mapping for DECL in CTX or the immediately enclosing
3316    context that has a mapping for DECL.
3317 
3318    If CTX is a nested parallel directive, we may have to use the decl
3319    mappings created in CTX's parent context.  Suppose that we have the
3320    following parallel nesting (variable UIDs shown for clarity):
3321 
3322 	iD.1562 = 0;
3323      	#omp parallel shared(iD.1562)		-> outer parallel
3324 	  iD.1562 = iD.1562 + 1;
3325 
3326 	  #omp parallel shared (iD.1562)	-> inner parallel
3327 	     iD.1562 = iD.1562 - 1;
3328 
3329    Each parallel structure will create a distinct .omp_data_s structure
3330    for copying iD.1562 in/out of the directive:
3331 
3332   	outer parallel		.omp_data_s.1.i -> iD.1562
3333 	inner parallel		.omp_data_s.2.i -> iD.1562
3334 
3335    A shared variable mapping will produce a copy-out operation before
3336    the parallel directive and a copy-in operation after it.  So, in
3337    this case we would have:
3338 
3339   	iD.1562 = 0;
3340 	.omp_data_o.1.i = iD.1562;
3341 	#omp parallel shared(iD.1562)		-> outer parallel
3342 	  .omp_data_i.1 = &.omp_data_o.1
3343 	  .omp_data_i.1->i = .omp_data_i.1->i + 1;
3344 
3345 	  .omp_data_o.2.i = iD.1562;		-> **
3346 	  #omp parallel shared(iD.1562)		-> inner parallel
3347 	    .omp_data_i.2 = &.omp_data_o.2
3348 	    .omp_data_i.2->i = .omp_data_i.2->i - 1;
3349 
3350 
3351     ** This is a problem.  The symbol iD.1562 cannot be referenced
3352        inside the body of the outer parallel region.  But since we are
3353        emitting this copy operation while expanding the inner parallel
3354        directive, we need to access the CTX structure of the outer
3355        parallel directive to get the correct mapping:
3356 
3357 	  .omp_data_o.2.i = .omp_data_i.1->i
3358 
3359     Since there may be other workshare or parallel directives enclosing
3360     the parallel directive, it may be necessary to walk up the context
3361     parent chain.  This is not a problem in general because nested
3362     parallelism happens only rarely.  */
3363 
3364 static tree
3365 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3366 {
3367   tree t;
3368   omp_context *up;
3369 
3370   for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3371     t = maybe_lookup_decl (decl, up);
3372 
3373   gcc_assert (!ctx->is_nested || t || is_global_var (decl));
3374 
3375   return t ? t : decl;
3376 }
3377 
3378 
3379 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3380    in outer contexts.  */
3381 
3382 static tree
3383 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3384 {
3385   tree t = NULL;
3386   omp_context *up;
3387 
3388   for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3389     t = maybe_lookup_decl (decl, up);
3390 
3391   return t ? t : decl;
3392 }
3393 
3394 
3395 /* Construct the initialization value for reduction operation OP.  */
3396 
3397 tree
3398 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
3399 {
3400   switch (op)
3401     {
3402     case PLUS_EXPR:
3403     case MINUS_EXPR:
3404     case BIT_IOR_EXPR:
3405     case BIT_XOR_EXPR:
3406     case TRUTH_OR_EXPR:
3407     case TRUTH_ORIF_EXPR:
3408     case TRUTH_XOR_EXPR:
3409     case NE_EXPR:
3410       return build_zero_cst (type);
3411 
3412     case MULT_EXPR:
3413     case TRUTH_AND_EXPR:
3414     case TRUTH_ANDIF_EXPR:
3415     case EQ_EXPR:
3416       return fold_convert_loc (loc, type, integer_one_node);
3417 
3418     case BIT_AND_EXPR:
3419       return fold_convert_loc (loc, type, integer_minus_one_node);
3420 
3421     case MAX_EXPR:
3422       if (SCALAR_FLOAT_TYPE_P (type))
3423 	{
3424 	  REAL_VALUE_TYPE max, min;
3425 	  if (HONOR_INFINITIES (type))
3426 	    {
3427 	      real_inf (&max);
3428 	      real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
3429 	    }
3430 	  else
3431 	    real_maxval (&min, 1, TYPE_MODE (type));
3432 	  return build_real (type, min);
3433 	}
3434       else if (POINTER_TYPE_P (type))
3435 	{
3436 	  wide_int min
3437 	    = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3438 	  return wide_int_to_tree (type, min);
3439 	}
3440       else
3441 	{
3442 	  gcc_assert (INTEGRAL_TYPE_P (type));
3443 	  return TYPE_MIN_VALUE (type);
3444 	}
3445 
3446     case MIN_EXPR:
3447       if (SCALAR_FLOAT_TYPE_P (type))
3448 	{
3449 	  REAL_VALUE_TYPE max;
3450 	  if (HONOR_INFINITIES (type))
3451 	    real_inf (&max);
3452 	  else
3453 	    real_maxval (&max, 0, TYPE_MODE (type));
3454 	  return build_real (type, max);
3455 	}
3456       else if (POINTER_TYPE_P (type))
3457 	{
3458 	  wide_int max
3459 	    = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3460 	  return wide_int_to_tree (type, max);
3461 	}
3462       else
3463 	{
3464 	  gcc_assert (INTEGRAL_TYPE_P (type));
3465 	  return TYPE_MAX_VALUE (type);
3466 	}
3467 
3468     default:
3469       gcc_unreachable ();
3470     }
3471 }
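
/* For instance, for a 32-bit signed integer TYPE the identity values
   produced above are:

	reduction(+:x)    ->  0
	reduction(*:x)    ->  1
	reduction(|:x)    ->  0
	reduction(&:x)    ->  -1	(all bits set)
	reduction(max:x)  ->  INT_MIN
	reduction(min:x)  ->  INT_MAX

   For floating-point max/min the initializer is -Inf/+Inf when
   infinities are honored, otherwise the most negative/positive
   representable finite value.  */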
3472 
3473 /* Construct the initialization value for reduction CLAUSE.  */
3474 
3475 tree
3476 omp_reduction_init (tree clause, tree type)
3477 {
3478   return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
3479 				OMP_CLAUSE_REDUCTION_CODE (clause), type);
3480 }
3481 
3482 /* Return alignment to be assumed for var in CLAUSE, which should be
3483    OMP_CLAUSE_ALIGNED.  */
3484 
3485 static tree
3486 omp_clause_aligned_alignment (tree clause)
3487 {
3488   if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
3489     return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
3490 
3491   /* Otherwise return implementation defined alignment.  */
3492   unsigned int al = 1;
3493   machine_mode mode, vmode;
3494   int vs = targetm.vectorize.autovectorize_vector_sizes ();
3495   if (vs)
3496     vs = 1 << floor_log2 (vs);
3497   static enum mode_class classes[]
3498     = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
3499   for (int i = 0; i < 4; i += 2)
3500     for (mode = GET_CLASS_NARROWEST_MODE (classes[i]);
3501 	 mode != VOIDmode;
3502 	 mode = GET_MODE_WIDER_MODE (mode))
3503       {
3504 	vmode = targetm.vectorize.preferred_simd_mode (mode);
3505 	if (GET_MODE_CLASS (vmode) != classes[i + 1])
3506 	  continue;
3507 	while (vs
3508 	       && GET_MODE_SIZE (vmode) < vs
3509 	       && GET_MODE_2XWIDER_MODE (vmode) != VOIDmode)
3510 	  vmode = GET_MODE_2XWIDER_MODE (vmode);
3511 
3512 	tree type = lang_hooks.types.type_for_mode (mode, 1);
3513 	if (type == NULL_TREE || TYPE_MODE (type) != mode)
3514 	  continue;
3515 	type = build_vector_type (type, GET_MODE_SIZE (vmode)
3516 					/ GET_MODE_SIZE (mode));
3517 	if (TYPE_MODE (type) != vmode)
3518 	  continue;
3519 	if (TYPE_ALIGN_UNIT (type) > al)
3520 	  al = TYPE_ALIGN_UNIT (type);
3521       }
3522   return build_int_cst (integer_type_node, al);
3523 }
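
/* For instance, on a hypothetical target whose preferred SIMD mode for
   SImode is a 256-bit integer vector, the loop above builds a V8SI-like
   vector type with 32-byte alignment, so an ALIGNED clause without an
   explicit alignment argument would assume 32 bytes.  */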
3524 
3525 
3526 /* This structure is part of the interface between lower_rec_simd_input_clauses
3527    and lower_rec_input_clauses.  */
3528 
3529 struct omplow_simd_context {
3530   tree idx;		/* Loop index over the per-lane array copies.  */
3531   tree lane;		/* The current SIMD lane within the loop body.  */
3532   vec<tree, va_heap> simt_eargs;  /* Args for IFN_GOMP_SIMT_ENTER.  */
3533   gimple_seq simt_dlist;	/* Clobbers of SIMT-privatized variables.  */
3534   int max_vf;		/* Max VF; 0 until computed, 1 disables SIMD.  */
3535   bool is_simt;		/* Lowering for SIMT rather than SIMD.  */
3536 };
3537 
3538 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
3539    privatization.  */
3540 
3541 static bool
3542 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
3543 			      omplow_simd_context *sctx, tree &ivar, tree &lvar)
3544 {
3545   if (sctx->max_vf == 0)
3546     {
3547       sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
3548       if (sctx->max_vf > 1)
3549 	{
3550 	  tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3551 				    OMP_CLAUSE_SAFELEN);
3552 	  if (c
3553 	      && (TREE_CODE (OMP_CLAUSE_SAFELEN_EXPR (c)) != INTEGER_CST
3554 		  || tree_int_cst_sgn (OMP_CLAUSE_SAFELEN_EXPR (c)) != 1))
3555 	    sctx->max_vf = 1;
3556 	  else if (c && compare_tree_int (OMP_CLAUSE_SAFELEN_EXPR (c),
3557 					  sctx->max_vf) == -1)
3558 	    sctx->max_vf = tree_to_shwi (OMP_CLAUSE_SAFELEN_EXPR (c));
3559 	}
3560       if (sctx->max_vf > 1)
3561 	{
3562 	  sctx->idx = create_tmp_var (unsigned_type_node);
3563 	  sctx->lane = create_tmp_var (unsigned_type_node);
3564 	}
3565     }
3566   if (sctx->max_vf == 1)
3567     return false;
3568 
3569   if (sctx->is_simt)
3570     {
3571       if (is_gimple_reg (new_var))
3572 	{
3573 	  ivar = lvar = new_var;
3574 	  return true;
3575 	}
3576       tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
3577       ivar = lvar = create_tmp_var (type);
3578       TREE_ADDRESSABLE (ivar) = 1;
3579       DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
3580 					  NULL, DECL_ATTRIBUTES (ivar));
3581       sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
3582       tree clobber = build_constructor (type, NULL);
3583       TREE_THIS_VOLATILE (clobber) = 1;
3584       gimple *g = gimple_build_assign (ivar, clobber);
3585       gimple_seq_add_stmt (&sctx->simt_dlist, g);
3586     }
3587   else
3588     {
3589       tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
3590       tree avar = create_tmp_var_raw (atype);
3591       if (TREE_ADDRESSABLE (new_var))
3592 	TREE_ADDRESSABLE (avar) = 1;
3593       DECL_ATTRIBUTES (avar)
3594 	= tree_cons (get_identifier ("omp simd array"), NULL,
3595 		     DECL_ATTRIBUTES (avar));
3596       gimple_add_tmp_var (avar);
3597       ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->idx,
3598 		     NULL_TREE, NULL_TREE);
3599       lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
3600 		     NULL_TREE, NULL_TREE);
3601     }
3602   if (DECL_P (new_var))
3603     {
3604       SET_DECL_VALUE_EXPR (new_var, lvar);
3605       DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3606     }
3607   return true;
3608 }
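
/* A minimal sketch of the non-SIMT path above: given

	#pragma omp simd private(x)

   and a maximum vectorization factor of, say, 8, the privatized X is
   backed by an "omp simd array"

	typeof(x) D.x[8];

   Uses of X inside the loop body go through the DECL_VALUE_EXPR to
   D.x[lane], while the per-lane initialization and destruction loops
   emitted by the caller index D.x[idx].  */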
3609 
3610 /* Helper function of lower_rec_input_clauses.  For a reference
3611    in a simd reduction, add an underlying variable that it will reference.  */
3612 
3613 static void
3614 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
3615 {
3616   tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
3617   if (TREE_CONSTANT (z))
3618     {
3619       z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
3620 			      get_name (new_vard));
3621       gimple_add_tmp_var (z);
3622       TREE_ADDRESSABLE (z) = 1;
3623       z = build_fold_addr_expr_loc (loc, z);
3624       gimplify_assign (new_vard, z, ilist);
3625     }
3626 }
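
/* E.g., for a reference NEW_VARD of type T & where T has constant size,
   the code above emits roughly

	T D.tmp;
	new_vard = &D.tmp;

   into ILIST, so the reference has storage to point at.  */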
3627 
3628 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
3629    from the receiver (aka child) side and initializers for REFERENCE_TYPE
3630    private variables.  Initialization statements go in ILIST, while calls
3631    to destructors go in DLIST.  */
3632 
3633 static void
3634 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
3635 			 omp_context *ctx, struct omp_for_data *fd)
3636 {
3637   tree c, dtor, copyin_seq, x, ptr;
3638   bool copyin_by_ref = false;
3639   bool lastprivate_firstprivate = false;
3640   bool reduction_omp_orig_ref = false;
3641   int pass;
3642   bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3643 		  && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD);
3644   omplow_simd_context sctx = omplow_simd_context ();
3645   tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
3646   tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
3647   gimple_seq llist[3] = { };
3648 
3649   copyin_seq = NULL;
3650   sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
3651 
3652   /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3653      with data sharing clauses referencing variable sized vars.  That
3654      is unnecessarily hard to support and very unlikely to result in
3655      vectorized code anyway.  */
3656   if (is_simd)
3657     for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3658       switch (OMP_CLAUSE_CODE (c))
3659 	{
3660 	case OMP_CLAUSE_LINEAR:
3661 	  if (OMP_CLAUSE_LINEAR_ARRAY (c))
3662 	    sctx.max_vf = 1;
3663 	  /* FALLTHRU */
3664 	case OMP_CLAUSE_PRIVATE:
3665 	case OMP_CLAUSE_FIRSTPRIVATE:
3666 	case OMP_CLAUSE_LASTPRIVATE:
3667 	  if (is_variable_sized (OMP_CLAUSE_DECL (c)))
3668 	    sctx.max_vf = 1;
3669 	  break;
3670 	case OMP_CLAUSE_REDUCTION:
3671 	  if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
3672 	      || is_variable_sized (OMP_CLAUSE_DECL (c)))
3673 	    sctx.max_vf = 1;
3674 	  break;
3675 	default:
3676 	  continue;
3677 	}
3678 
3679   /* Add a placeholder for simduid.  */
3680   if (sctx.is_simt && sctx.max_vf != 1)
3681     sctx.simt_eargs.safe_push (NULL_TREE);
3682 
3683   /* Do all the fixed sized types in the first pass, and the variable sized
3684      types in the second pass.  This makes sure that the scalar arguments to
3685      the variable sized types are processed before we use them in the
3686      variable sized operations.  */
3687   for (pass = 0; pass < 2; ++pass)
3688     {
3689       for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3690 	{
3691 	  enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
3692 	  tree var, new_var;
3693 	  bool by_ref;
3694 	  location_t clause_loc = OMP_CLAUSE_LOCATION (c);
3695 
3696 	  switch (c_kind)
3697 	    {
3698 	    case OMP_CLAUSE_PRIVATE:
3699 	      if (OMP_CLAUSE_PRIVATE_DEBUG (c))
3700 		continue;
3701 	      break;
3702 	    case OMP_CLAUSE_SHARED:
3703 	      /* Ignore shared directives in teams construct.  */
3704 	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3705 		continue;
3706 	      if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
3707 		{
3708 		  gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
3709 			      || is_global_var (OMP_CLAUSE_DECL (c)));
3710 		  continue;
3711 		}
	      /* FALLTHRU */
3712 	    case OMP_CLAUSE_FIRSTPRIVATE:
3713 	    case OMP_CLAUSE_COPYIN:
3714 	      break;
3715 	    case OMP_CLAUSE_LINEAR:
3716 	      if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
3717 		  && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
3718 		lastprivate_firstprivate = true;
3719 	      break;
3720 	    case OMP_CLAUSE_REDUCTION:
3721 	      if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3722 		reduction_omp_orig_ref = true;
3723 	      break;
3724 	    case OMP_CLAUSE__LOOPTEMP_:
3725 	      /* Handle _looptemp_ clauses only on parallel/task.  */
3726 	      if (fd)
3727 		continue;
3728 	      break;
3729 	    case OMP_CLAUSE_LASTPRIVATE:
3730 	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
3731 		{
3732 		  lastprivate_firstprivate = true;
3733 		  if (pass != 0 || is_taskloop_ctx (ctx))
3734 		    continue;
3735 		}
3736 	      /* Even without corresponding firstprivate, if
3737 		 decl is Fortran allocatable, it needs outer var
3738 		 reference.  */
3739 	      else if (pass == 0
3740 		       && lang_hooks.decls.omp_private_outer_ref
3741 							(OMP_CLAUSE_DECL (c)))
3742 		lastprivate_firstprivate = true;
3743 	      break;
3744 	    case OMP_CLAUSE_ALIGNED:
3745 	      if (pass == 0)
3746 		continue;
3747 	      var = OMP_CLAUSE_DECL (c);
3748 	      if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
3749 		  && !is_global_var (var))
3750 		{
3751 		  new_var = maybe_lookup_decl (var, ctx);
3752 		  if (new_var == NULL_TREE)
3753 		    new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
3754 		  x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3755 		  tree alarg = omp_clause_aligned_alignment (c);
3756 		  alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3757 		  x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
3758 		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3759 		  x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
3760 		  gimplify_and_add (x, ilist);
3761 		}
3762 	      else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
3763 		       && is_global_var (var))
3764 		{
3765 		  tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
3766 		  new_var = lookup_decl (var, ctx);
3767 		  t = maybe_lookup_decl_in_outer_ctx (var, ctx);
3768 		  t = build_fold_addr_expr_loc (clause_loc, t);
3769 		  t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3770 		  tree alarg = omp_clause_aligned_alignment (c);
3771 		  alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3772 		  t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
3773 		  t = fold_convert_loc (clause_loc, ptype, t);
3774 		  x = create_tmp_var (ptype);
3775 		  t = build2 (MODIFY_EXPR, ptype, x, t);
3776 		  gimplify_and_add (t, ilist);
3777 		  t = build_simple_mem_ref_loc (clause_loc, x);
3778 		  SET_DECL_VALUE_EXPR (new_var, t);
3779 		  DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3780 		}
3781 	      continue;
3782 	    default:
3783 	      continue;
3784 	    }
3785 
3786 	  new_var = var = OMP_CLAUSE_DECL (c);
3787 	  if (c_kind == OMP_CLAUSE_REDUCTION && TREE_CODE (var) == MEM_REF)
3788 	    {
3789 	      var = TREE_OPERAND (var, 0);
3790 	      if (TREE_CODE (var) == POINTER_PLUS_EXPR)
3791 		var = TREE_OPERAND (var, 0);
3792 	      if (TREE_CODE (var) == INDIRECT_REF
3793 		  || TREE_CODE (var) == ADDR_EXPR)
3794 		var = TREE_OPERAND (var, 0);
3795 	      if (is_variable_sized (var))
3796 		{
3797 		  gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
3798 		  var = DECL_VALUE_EXPR (var);
3799 		  gcc_assert (TREE_CODE (var) == INDIRECT_REF);
3800 		  var = TREE_OPERAND (var, 0);
3801 		  gcc_assert (DECL_P (var));
3802 		}
3803 	      new_var = var;
3804 	    }
3805 	  if (c_kind != OMP_CLAUSE_COPYIN)
3806 	    new_var = lookup_decl (var, ctx);
3807 
3808 	  if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
3809 	    {
3810 	      if (pass != 0)
3811 		continue;
3812 	    }
3813 	  /* C/C++ array section reductions.  */
3814 	  else if (c_kind == OMP_CLAUSE_REDUCTION
3815 		   && var != OMP_CLAUSE_DECL (c))
3816 	    {
3817 	      if (pass == 0)
3818 		continue;
3819 
3820 	      tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
3821 	      tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
3822 	      if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
3823 		{
3824 		  tree b = TREE_OPERAND (orig_var, 1);
3825 		  b = maybe_lookup_decl (b, ctx);
3826 		  if (b == NULL)
3827 		    {
3828 		      b = TREE_OPERAND (orig_var, 1);
3829 		      b = maybe_lookup_decl_in_outer_ctx (b, ctx);
3830 		    }
3831 		  if (integer_zerop (bias))
3832 		    bias = b;
3833 		  else
3834 		    {
3835 		      bias = fold_convert_loc (clause_loc,
3836 					       TREE_TYPE (b), bias);
3837 		      bias = fold_build2_loc (clause_loc, PLUS_EXPR,
3838 					      TREE_TYPE (b), b, bias);
3839 		    }
3840 		  orig_var = TREE_OPERAND (orig_var, 0);
3841 		}
3842 	      if (TREE_CODE (orig_var) == INDIRECT_REF
3843 		  || TREE_CODE (orig_var) == ADDR_EXPR)
3844 		orig_var = TREE_OPERAND (orig_var, 0);
3845 	      tree d = OMP_CLAUSE_DECL (c);
3846 	      tree type = TREE_TYPE (d);
3847 	      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
3848 	      tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
3849 	      const char *name = get_name (orig_var);
3850 	      if (TREE_CONSTANT (v))
3851 		{
3852 		  x = create_tmp_var_raw (type, name);
3853 		  gimple_add_tmp_var (x);
3854 		  TREE_ADDRESSABLE (x) = 1;
3855 		  x = build_fold_addr_expr_loc (clause_loc, x);
3856 		}
3857 	      else
3858 		{
3859 		  tree atmp
3860 		    = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
3861 		  tree t = maybe_lookup_decl (v, ctx);
3862 		  if (t)
3863 		    v = t;
3864 		  else
3865 		    v = maybe_lookup_decl_in_outer_ctx (v, ctx);
3866 		  gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
3867 		  t = fold_build2_loc (clause_loc, PLUS_EXPR,
3868 				       TREE_TYPE (v), v,
3869 				       build_int_cst (TREE_TYPE (v), 1));
3870 		  t = fold_build2_loc (clause_loc, MULT_EXPR,
3871 				       TREE_TYPE (v), t,
3872 				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
3873 		  tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
3874 		  x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
3875 		}
3876 
3877 	      tree ptype = build_pointer_type (TREE_TYPE (type));
3878 	      x = fold_convert_loc (clause_loc, ptype, x);
3879 	      tree y = create_tmp_var (ptype, name);
3880 	      gimplify_assign (y, x, ilist);
3881 	      x = y;
3882 	      tree yb = y;
3883 
3884 	      if (!integer_zerop (bias))
3885 		{
3886 		  bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
3887 					   bias);
3888 		  yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
3889 					 x);
3890 		  yb = fold_build2_loc (clause_loc, MINUS_EXPR,
3891 					pointer_sized_int_node, yb, bias);
3892 		  x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
3893 		  yb = create_tmp_var (ptype, name);
3894 		  gimplify_assign (yb, x, ilist);
3895 		  x = yb;
3896 		}
3897 
3898 	      d = TREE_OPERAND (d, 0);
3899 	      if (TREE_CODE (d) == POINTER_PLUS_EXPR)
3900 		d = TREE_OPERAND (d, 0);
3901 	      if (TREE_CODE (d) == ADDR_EXPR)
3902 		{
3903 		  if (orig_var != var)
3904 		    {
3905 		      gcc_assert (is_variable_sized (orig_var));
3906 		      x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
3907 					    x);
3908 		      gimplify_assign (new_var, x, ilist);
3909 		      tree new_orig_var = lookup_decl (orig_var, ctx);
3910 		      tree t = build_fold_indirect_ref (new_var);
3911 		      DECL_IGNORED_P (new_var) = 0;
3912 		      TREE_THIS_NOTRAP (t) = 1;
3913 		      SET_DECL_VALUE_EXPR (new_orig_var, t);
3914 		      DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
3915 		    }
3916 		  else
3917 		    {
3918 		      x = build2 (MEM_REF, TREE_TYPE (new_var), x,
3919 				  build_int_cst (ptype, 0));
3920 		      SET_DECL_VALUE_EXPR (new_var, x);
3921 		      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3922 		    }
3923 		}
3924 	      else
3925 		{
3926 		  gcc_assert (orig_var == var);
3927 		  if (TREE_CODE (d) == INDIRECT_REF)
3928 		    {
3929 		      x = create_tmp_var (ptype, name);
3930 		      TREE_ADDRESSABLE (x) = 1;
3931 		      gimplify_assign (x, yb, ilist);
3932 		      x = build_fold_addr_expr_loc (clause_loc, x);
3933 		    }
3934 		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3935 		  gimplify_assign (new_var, x, ilist);
3936 		}
3937 	      tree y1 = create_tmp_var (ptype, NULL);
3938 	      gimplify_assign (y1, y, ilist);
3939 	      tree i2 = NULL_TREE, y2 = NULL_TREE;
3940 	      tree body2 = NULL_TREE, end2 = NULL_TREE;
3941 	      tree y3 = NULL_TREE, y4 = NULL_TREE;
3942 	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
3943 		{
3944 		  y2 = create_tmp_var (ptype, NULL);
3945 		  gimplify_assign (y2, y, ilist);
3946 		  tree ref = build_outer_var_ref (var, ctx);
3947 		  /* For references, build_outer_var_ref already performs this.  */
3948 		  if (TREE_CODE (d) == INDIRECT_REF)
3949 		    gcc_assert (omp_is_reference (var));
3950 		  else if (TREE_CODE (d) == ADDR_EXPR)
3951 		    ref = build_fold_addr_expr (ref);
3952 		  else if (omp_is_reference (var))
3953 		    ref = build_fold_addr_expr (ref);
3954 		  ref = fold_convert_loc (clause_loc, ptype, ref);
3955 		  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
3956 		      && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3957 		    {
3958 		      y3 = create_tmp_var (ptype, NULL);
3959 		      gimplify_assign (y3, unshare_expr (ref), ilist);
3960 		    }
3961 		  if (is_simd)
3962 		    {
3963 		      y4 = create_tmp_var (ptype, NULL);
3964 		      gimplify_assign (y4, ref, dlist);
3965 		    }
3966 		}
3967 	      tree i = create_tmp_var (TREE_TYPE (v), NULL);
3968 	      gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
3969 	      tree body = create_artificial_label (UNKNOWN_LOCATION);
3970 	      tree end = create_artificial_label (UNKNOWN_LOCATION);
3971 	      gimple_seq_add_stmt (ilist, gimple_build_label (body));
3972 	      if (y2)
3973 		{
3974 		  i2 = create_tmp_var (TREE_TYPE (v), NULL);
3975 		  gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
3976 		  body2 = create_artificial_label (UNKNOWN_LOCATION);
3977 		  end2 = create_artificial_label (UNKNOWN_LOCATION);
3978 		  gimple_seq_add_stmt (dlist, gimple_build_label (body2));
3979 		}
3980 	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
3981 		{
3982 		  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
3983 		  tree decl_placeholder
3984 		    = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
3985 		  SET_DECL_VALUE_EXPR (decl_placeholder,
3986 				       build_simple_mem_ref (y1));
3987 		  DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
3988 		  SET_DECL_VALUE_EXPR (placeholder,
3989 				       y3 ? build_simple_mem_ref (y3)
3990 				       : error_mark_node);
3991 		  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
3992 		  x = lang_hooks.decls.omp_clause_default_ctor
3993 				(c, build_simple_mem_ref (y1),
3994 				 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
3995 		  if (x)
3996 		    gimplify_and_add (x, ilist);
3997 		  if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
3998 		    {
3999 		      gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4000 		      lower_omp (&tseq, ctx);
4001 		      gimple_seq_add_seq (ilist, tseq);
4002 		    }
4003 		  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4004 		  if (is_simd)
4005 		    {
4006 		      SET_DECL_VALUE_EXPR (decl_placeholder,
4007 					   build_simple_mem_ref (y2));
4008 		      SET_DECL_VALUE_EXPR (placeholder,
4009 					   build_simple_mem_ref (y4));
4010 		      gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4011 		      lower_omp (&tseq, ctx);
4012 		      gimple_seq_add_seq (dlist, tseq);
4013 		      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4014 		    }
4015 		  DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4016 		  DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
4017 		  x = lang_hooks.decls.omp_clause_dtor
4018 					(c, build_simple_mem_ref (y2));
4019 		  if (x)
4020 		    {
4021 		      gimple_seq tseq = NULL;
4022 		      dtor = x;
4023 		      gimplify_stmt (&dtor, &tseq);
4024 		      gimple_seq_add_seq (dlist, tseq);
4025 		    }
4026 		}
4027 	      else
4028 		{
4029 		  x = omp_reduction_init (c, TREE_TYPE (type));
4030 		  enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4031 
4032 		  /* reduction(-:var) sums up the partial results, so it
4033 		     acts identically to reduction(+:var).  */
4034 		  if (code == MINUS_EXPR)
4035 		    code = PLUS_EXPR;
4036 
4037 		  gimplify_assign (build_simple_mem_ref (y1), x, ilist);
4038 		  if (is_simd)
4039 		    {
4040 		      x = build2 (code, TREE_TYPE (type),
4041 				  build_simple_mem_ref (y4),
4042 				  build_simple_mem_ref (y2));
4043 		      gimplify_assign (build_simple_mem_ref (y4), x, dlist);
4044 		    }
4045 		}
4046 	      gimple *g
4047 		= gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
4048 				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
4049 	      gimple_seq_add_stmt (ilist, g);
4050 	      if (y3)
4051 		{
4052 		  g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
4053 					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
4054 		  gimple_seq_add_stmt (ilist, g);
4055 		}
4056 	      g = gimple_build_assign (i, PLUS_EXPR, i,
4057 				       build_int_cst (TREE_TYPE (i), 1));
4058 	      gimple_seq_add_stmt (ilist, g);
4059 	      g = gimple_build_cond (LE_EXPR, i, v, body, end);
4060 	      gimple_seq_add_stmt (ilist, g);
4061 	      gimple_seq_add_stmt (ilist, gimple_build_label (end));
4062 	      if (y2)
4063 		{
4064 		  g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
4065 					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
4066 		  gimple_seq_add_stmt (dlist, g);
4067 		  if (y4)
4068 		    {
4069 		      g = gimple_build_assign
4070 					(y4, POINTER_PLUS_EXPR, y4,
4071 					 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4072 		      gimple_seq_add_stmt (dlist, g);
4073 		    }
4074 		  g = gimple_build_assign (i2, PLUS_EXPR, i2,
4075 					   build_int_cst (TREE_TYPE (i2), 1));
4076 		  gimple_seq_add_stmt (dlist, g);
4077 		  g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
4078 		  gimple_seq_add_stmt (dlist, g);
4079 		  gimple_seq_add_stmt (dlist, gimple_build_label (end2));
4080 		}
4081 	      continue;
4082 	    }
4083 	  else if (is_variable_sized (var))
4084 	    {
4085 	      /* For variable sized types, we need to allocate the
4086 		 actual storage here.  Call alloca and store the
4087 		 result in the pointer decl that we created elsewhere.  */
4088 	      if (pass == 0)
4089 		continue;
4090 
4091 	      if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
4092 		{
4093 		  gcall *stmt;
4094 		  tree tmp, atmp;
4095 
4096 		  ptr = DECL_VALUE_EXPR (new_var);
4097 		  gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
4098 		  ptr = TREE_OPERAND (ptr, 0);
4099 		  gcc_assert (DECL_P (ptr));
4100 		  x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
4101 
4102 		  /* void *tmp = __builtin_alloca_with_align (size, align);  */
4103 		  atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4104 		  stmt = gimple_build_call (atmp, 2, x,
4105 					    size_int (DECL_ALIGN (var)));
4106 		  tmp = create_tmp_var_raw (ptr_type_node);
4107 		  gimple_add_tmp_var (tmp);
4108 		  gimple_call_set_lhs (stmt, tmp);
4109 
4110 		  gimple_seq_add_stmt (ilist, stmt);
4111 
4112 		  x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
4113 		  gimplify_assign (ptr, x, ilist);
4114 		}
4115 	    }
4116 	  else if (omp_is_reference (var))
4117 	    {
4118 	      /* For references that are being privatized for Fortran,
4119 		 allocate new backing storage for the new pointer
4120 		 variable.  This allows us to avoid changing all the
4121 		 code that expects a pointer to something that expects
4122 		 a direct variable.  */
4123 	      if (pass == 0)
4124 		continue;
4125 
4126 	      x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
4127 	      if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
4128 		{
4129 		  x = build_receiver_ref (var, false, ctx);
4130 		  x = build_fold_addr_expr_loc (clause_loc, x);
4131 		}
4132 	      else if (TREE_CONSTANT (x))
4133 		{
4134 		  /* For a reduction in a SIMD loop, defer adding the
4135 		     initialization of the reference, because if we decide
4136 		     to use a SIMD array for it, the initialization could
4137 		     cause an expansion ICE.  */
4138 		  if (c_kind == OMP_CLAUSE_REDUCTION && is_simd)
4139 		    x = NULL_TREE;
4140 		  else
4141 		    {
4142 		      x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
4143 					      get_name (var));
4144 		      gimple_add_tmp_var (x);
4145 		      TREE_ADDRESSABLE (x) = 1;
4146 		      x = build_fold_addr_expr_loc (clause_loc, x);
4147 		    }
4148 		}
4149 	      else
4150 		{
4151 		  tree atmp
4152 		    = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4153 		  tree rtype = TREE_TYPE (TREE_TYPE (new_var));
4154 		  tree al = size_int (TYPE_ALIGN (rtype));
4155 		  x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
4156 		}
4157 
4158 	      if (x)
4159 		{
4160 		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4161 		  gimplify_assign (new_var, x, ilist);
4162 		}
4163 
4164 	      new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4165 	    }
4166 	  else if (c_kind == OMP_CLAUSE_REDUCTION
4167 		   && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4168 	    {
4169 	      if (pass == 0)
4170 		continue;
4171 	    }
4172 	  else if (pass != 0)
4173 	    continue;
4174 
4175 	  switch (OMP_CLAUSE_CODE (c))
4176 	    {
4177 	    case OMP_CLAUSE_SHARED:
4178 	      /* Ignore shared directives in teams construct.  */
4179 	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
4180 		continue;
4181 	      /* Shared global vars are just accessed directly.  */
4182 	      if (is_global_var (new_var))
4183 		break;
4184 	      /* For taskloop firstprivate/lastprivate, represented
4185 		 as firstprivate and shared clause on the task, new_var
4186 		 is the firstprivate var.  */
4187 	      if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
4188 		break;
4189 	      /* Set up the DECL_VALUE_EXPR for shared variables now.  This
4190 		 needs to be delayed until after fixup_child_record_type so
4191 		 that we get the correct type during the dereference.  */
4192 	      by_ref = use_pointer_for_field (var, ctx);
4193 	      x = build_receiver_ref (var, by_ref, ctx);
4194 	      SET_DECL_VALUE_EXPR (new_var, x);
4195 	      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4196 
4197 	      /* ??? If VAR is not passed by reference, and the variable
4198 		 hasn't been initialized yet, then we'll get a warning for
4199 		 the store into the omp_data_s structure.  Ideally, we'd be
4200 		 able to notice this and not store anything at all, but
4201 		 we're generating code too early.  Suppress the warning.  */
4202 	      if (!by_ref)
4203 		TREE_NO_WARNING (var) = 1;
4204 	      break;
4205 
4206 	    case OMP_CLAUSE_LASTPRIVATE:
4207 	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4208 		break;
4209 	      /* FALLTHRU */
4210 
4211 	    case OMP_CLAUSE_PRIVATE:
4212 	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
4213 		x = build_outer_var_ref (var, ctx);
4214 	      else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
4215 		{
4216 		  if (is_task_ctx (ctx))
4217 		    x = build_receiver_ref (var, false, ctx);
4218 		  else
4219 		    x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
4220 		}
4221 	      else
4222 		x = NULL;
4223 	    do_private:
4224 	      tree nx;
4225 	      nx = lang_hooks.decls.omp_clause_default_ctor
4226 						(c, unshare_expr (new_var), x);
4227 	      if (is_simd)
4228 		{
4229 		  tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
4230 		  if ((TREE_ADDRESSABLE (new_var) || nx || y
4231 		       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
4232 		      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4233 						       ivar, lvar))
4234 		    {
4235 		      if (nx)
4236 			x = lang_hooks.decls.omp_clause_default_ctor
4237 						(c, unshare_expr (ivar), x);
4238 		      if (nx && x)
4239 			gimplify_and_add (x, &llist[0]);
4240 		      if (y)
4241 			{
4242 			  y = lang_hooks.decls.omp_clause_dtor (c, ivar);
4243 			  if (y)
4244 			    {
4245 			      gimple_seq tseq = NULL;
4246 
4247 			      dtor = y;
4248 			      gimplify_stmt (&dtor, &tseq);
4249 			      gimple_seq_add_seq (&llist[1], tseq);
4250 			    }
4251 			}
4252 		      break;
4253 		    }
4254 		}
4255 	      if (nx)
4256 		gimplify_and_add (nx, ilist);
4257 	      /* FALLTHRU */
4258 
4259 	    do_dtor:
4260 	      x = lang_hooks.decls.omp_clause_dtor (c, new_var);
4261 	      if (x)
4262 		{
4263 		  gimple_seq tseq = NULL;
4264 
4265 		  dtor = x;
4266 		  gimplify_stmt (&dtor, &tseq);
4267 		  gimple_seq_add_seq (dlist, tseq);
4268 		}
4269 	      break;
4270 
4271 	    case OMP_CLAUSE_LINEAR:
4272 	      if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
4273 		goto do_firstprivate;
4274 	      if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4275 		x = NULL;
4276 	      else
4277 		x = build_outer_var_ref (var, ctx);
4278 	      goto do_private;
4279 
4280 	    case OMP_CLAUSE_FIRSTPRIVATE:
4281 	      if (is_task_ctx (ctx))
4282 		{
4283 		  if (omp_is_reference (var) || is_variable_sized (var))
4284 		    goto do_dtor;
4285 		  else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
4286 									  ctx))
4287 			   || use_pointer_for_field (var, NULL))
4288 		    {
4289 		      x = build_receiver_ref (var, false, ctx);
4290 		      SET_DECL_VALUE_EXPR (new_var, x);
4291 		      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4292 		      goto do_dtor;
4293 		    }
4294 		}
4295 	    do_firstprivate:
4296 	      x = build_outer_var_ref (var, ctx);
4297 	      if (is_simd)
4298 		{
4299 		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4300 		      && gimple_omp_for_combined_into_p (ctx->stmt))
4301 		    {
4302 		      tree t = OMP_CLAUSE_LINEAR_STEP (c);
4303 		      tree stept = TREE_TYPE (t);
4304 		      tree ct = omp_find_clause (clauses,
4305 						 OMP_CLAUSE__LOOPTEMP_);
4306 		      gcc_assert (ct);
4307 		      tree l = OMP_CLAUSE_DECL (ct);
4308 		      tree n1 = fd->loop.n1;
4309 		      tree step = fd->loop.step;
4310 		      tree itype = TREE_TYPE (l);
4311 		      if (POINTER_TYPE_P (itype))
4312 			itype = signed_type_for (itype);
4313 		      l = fold_build2 (MINUS_EXPR, itype, l, n1);
4314 		      if (TYPE_UNSIGNED (itype)
4315 			  && fd->loop.cond_code == GT_EXPR)
4316 			l = fold_build2 (TRUNC_DIV_EXPR, itype,
4317 					 fold_build1 (NEGATE_EXPR, itype, l),
4318 					 fold_build1 (NEGATE_EXPR,
4319 						      itype, step));
4320 		      else
4321 			l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
4322 		      t = fold_build2 (MULT_EXPR, stept,
4323 				       fold_convert (stept, l), t);
4324 
4325 		      if (OMP_CLAUSE_LINEAR_ARRAY (c))
4326 			{
4327 			  x = lang_hooks.decls.omp_clause_linear_ctor
4328 							(c, new_var, x, t);
4329 			  gimplify_and_add (x, ilist);
4330 			  goto do_dtor;
4331 			}
4332 
4333 		      if (POINTER_TYPE_P (TREE_TYPE (x)))
4334 			x = fold_build2 (POINTER_PLUS_EXPR,
4335 					 TREE_TYPE (x), x, t);
4336 		      else
4337 			x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
4338 		    }
4339 
4340 		  if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
4341 		       || TREE_ADDRESSABLE (new_var))
4342 		      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4343 						       ivar, lvar))
4344 		    {
4345 		      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
4346 			{
4347 			  tree iv = create_tmp_var (TREE_TYPE (new_var));
4348 			  x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
4349 			  gimplify_and_add (x, ilist);
4350 			  gimple_stmt_iterator gsi
4351 			    = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4352 			  gassign *g
4353 			    = gimple_build_assign (unshare_expr (lvar), iv);
4354 			  gsi_insert_before_without_update (&gsi, g,
4355 							    GSI_SAME_STMT);
4356 			  tree t = OMP_CLAUSE_LINEAR_STEP (c);
4357 			  enum tree_code code = PLUS_EXPR;
4358 			  if (POINTER_TYPE_P (TREE_TYPE (new_var)))
4359 			    code = POINTER_PLUS_EXPR;
4360 			  g = gimple_build_assign (iv, code, iv, t);
4361 			  gsi_insert_before_without_update (&gsi, g,
4362 							    GSI_SAME_STMT);
4363 			  break;
4364 			}
4365 		      x = lang_hooks.decls.omp_clause_copy_ctor
4366 						(c, unshare_expr (ivar), x);
4367 		      gimplify_and_add (x, &llist[0]);
4368 		      x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4369 		      if (x)
4370 			{
4371 			  gimple_seq tseq = NULL;
4372 
4373 			  dtor = x;
4374 			  gimplify_stmt (&dtor, &tseq);
4375 			  gimple_seq_add_seq (&llist[1], tseq);
4376 			}
4377 		      break;
4378 		    }
4379 		}
4380 	      x = lang_hooks.decls.omp_clause_copy_ctor
4381 						(c, unshare_expr (new_var), x);
4382 	      gimplify_and_add (x, ilist);
4383 	      goto do_dtor;
4384 
4385 	    case OMP_CLAUSE__LOOPTEMP_:
4386 	      gcc_assert (is_taskreg_ctx (ctx));
4387 	      x = build_outer_var_ref (var, ctx);
4388 	      x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4389 	      gimplify_and_add (x, ilist);
4390 	      break;
4391 
4392 	    case OMP_CLAUSE_COPYIN:
4393 	      by_ref = use_pointer_for_field (var, NULL);
4394 	      x = build_receiver_ref (var, by_ref, ctx);
4395 	      x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
4396 	      append_to_statement_list (x, &copyin_seq);
4397 	      copyin_by_ref |= by_ref;
4398 	      break;
4399 
4400 	    case OMP_CLAUSE_REDUCTION:
4401 	      /* OpenACC reductions are initialized using the
4402 		 GOACC_REDUCTION internal function.  */
4403 	      if (is_gimple_omp_oacc (ctx->stmt))
4404 		break;
4405 	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4406 		{
4407 		  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4408 		  gimple *tseq;
4409 		  x = build_outer_var_ref (var, ctx);
4410 
4411 		  if (omp_is_reference (var)
4412 		      && !useless_type_conversion_p (TREE_TYPE (placeholder),
4413 						     TREE_TYPE (x)))
4414 		    x = build_fold_addr_expr_loc (clause_loc, x);
4415 		  SET_DECL_VALUE_EXPR (placeholder, x);
4416 		  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4417 		  tree new_vard = new_var;
4418 		  if (omp_is_reference (var))
4419 		    {
4420 		      gcc_assert (TREE_CODE (new_var) == MEM_REF);
4421 		      new_vard = TREE_OPERAND (new_var, 0);
4422 		      gcc_assert (DECL_P (new_vard));
4423 		    }
4424 		  if (is_simd
4425 		      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4426 						       ivar, lvar))
4427 		    {
4428 		      if (new_vard == new_var)
4429 			{
4430 			  gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
4431 			  SET_DECL_VALUE_EXPR (new_var, ivar);
4432 			}
4433 		      else
4434 			{
4435 			  SET_DECL_VALUE_EXPR (new_vard,
4436 					       build_fold_addr_expr (ivar));
4437 			  DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4438 			}
4439 		      x = lang_hooks.decls.omp_clause_default_ctor
4440 				(c, unshare_expr (ivar),
4441 				 build_outer_var_ref (var, ctx));
4442 		      if (x)
4443 			gimplify_and_add (x, &llist[0]);
4444 		      if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4445 			{
4446 			  tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4447 			  lower_omp (&tseq, ctx);
4448 			  gimple_seq_add_seq (&llist[0], tseq);
4449 			}
4450 		      OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4451 		      tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4452 		      lower_omp (&tseq, ctx);
4453 		      gimple_seq_add_seq (&llist[1], tseq);
4454 		      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4455 		      DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4456 		      if (new_vard == new_var)
4457 			SET_DECL_VALUE_EXPR (new_var, lvar);
4458 		      else
4459 			SET_DECL_VALUE_EXPR (new_vard,
4460 					     build_fold_addr_expr (lvar));
4461 		      x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4462 		      if (x)
4463 			{
4464 			  tseq = NULL;
4465 			  dtor = x;
4466 			  gimplify_stmt (&dtor, &tseq);
4467 			  gimple_seq_add_seq (&llist[1], tseq);
4468 			}
4469 		      break;
4470 		    }
4471 		  /* If this is a reference to a constant-size reduction var
4472 		     with a placeholder, we haven't emitted its initializer
4473 		     yet because that is undesirable if SIMD arrays are used.
4474 		     But if they aren't used, we need to emit the deferred
4475 		     initialization now.  */
4476 		  else if (omp_is_reference (var) && is_simd)
4477 		    handle_simd_reference (clause_loc, new_vard, ilist);
4478 		  x = lang_hooks.decls.omp_clause_default_ctor
4479 				(c, unshare_expr (new_var),
4480 				 build_outer_var_ref (var, ctx));
4481 		  if (x)
4482 		    gimplify_and_add (x, ilist);
4483 		  if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4484 		    {
4485 		      tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4486 		      lower_omp (&tseq, ctx);
4487 		      gimple_seq_add_seq (ilist, tseq);
4488 		    }
4489 		  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4490 		  if (is_simd)
4491 		    {
4492 		      tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4493 		      lower_omp (&tseq, ctx);
4494 		      gimple_seq_add_seq (dlist, tseq);
4495 		      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4496 		    }
4497 		  DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4498 		  goto do_dtor;
4499 		}
4500 	      else
4501 		{
4502 		  x = omp_reduction_init (c, TREE_TYPE (new_var));
4503 		  gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
4504 		  enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4505 
4506 		  /* reduction(-:var) sums up the partial results, so it
4507 		     acts identically to reduction(+:var).  */
4508 		  if (code == MINUS_EXPR)
4509 		    code = PLUS_EXPR;
4510 
4511 		  tree new_vard = new_var;
4512 		  if (is_simd && omp_is_reference (var))
4513 		    {
4514 		      gcc_assert (TREE_CODE (new_var) == MEM_REF);
4515 		      new_vard = TREE_OPERAND (new_var, 0);
4516 		      gcc_assert (DECL_P (new_vard));
4517 		    }
4518 		  if (is_simd
4519 		      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4520 						       ivar, lvar))
4521 		    {
4522 		      tree ref = build_outer_var_ref (var, ctx);
4523 
4524 		      gimplify_assign (unshare_expr (ivar), x, &llist[0]);
4525 
4526 		      if (sctx.is_simt)
4527 			{
4528 			  if (!simt_lane)
4529 			    simt_lane = create_tmp_var (unsigned_type_node);
4530 			  x = build_call_expr_internal_loc
4531 			    (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
4532 			     TREE_TYPE (ivar), 2, ivar, simt_lane);
4533 			  x = build2 (code, TREE_TYPE (ivar), ivar, x);
4534 			  gimplify_assign (ivar, x, &llist[2]);
4535 			}
4536 		      x = build2 (code, TREE_TYPE (ref), ref, ivar);
4537 		      ref = build_outer_var_ref (var, ctx);
4538 		      gimplify_assign (ref, x, &llist[1]);
4539 
4540 		      if (new_vard != new_var)
4541 			{
4542 			  SET_DECL_VALUE_EXPR (new_vard,
4543 					       build_fold_addr_expr (lvar));
4544 			  DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4545 			}
4546 		    }
4547 		  else
4548 		    {
4549 		      if (omp_is_reference (var) && is_simd)
4550 			handle_simd_reference (clause_loc, new_vard, ilist);
4551 		      gimplify_assign (new_var, x, ilist);
4552 		      if (is_simd)
4553 			{
4554 			  tree ref = build_outer_var_ref (var, ctx);
4555 
4556 			  x = build2 (code, TREE_TYPE (ref), ref, new_var);
4557 			  ref = build_outer_var_ref (var, ctx);
4558 			  gimplify_assign (ref, x, dlist);
4559 			}
4560 		    }
4561 		}
4562 	      break;
4563 
4564 	    default:
4565 	      gcc_unreachable ();
4566 	    }
4567 	}
4568     }
4569 
4570   if (sctx.max_vf == 1)
4571     sctx.is_simt = false;
4572 
4573   if (sctx.lane || sctx.is_simt)
4574     {
4575       uid = create_tmp_var (ptr_type_node, "simduid");
4576       /* Don't warn about simduid being uninitialized: it always is, since
4577 	 we use it only for its DECL_UID, never for its value.  */
4578       TREE_NO_WARNING (uid) = 1;
4579       c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
4580       OMP_CLAUSE__SIMDUID__DECL (c) = uid;
4581       OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4582       gimple_omp_for_set_clauses (ctx->stmt, c);
4583     }
4584   /* Emit calls denoting the privatized variables and initializing a pointer
4585      to the structure holding them as fields; resolved after ompdevlow.  */
4586   if (sctx.is_simt)
4587     {
4588       sctx.simt_eargs[0] = uid;
4589       gimple *g
4590 	= gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
4591       gimple_call_set_lhs (g, uid);
4592       gimple_seq_add_stmt (ilist, g);
4593       sctx.simt_eargs.release ();
4594 
4595       simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
4596       g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
4597       gimple_call_set_lhs (g, simtrec);
4598       gimple_seq_add_stmt (ilist, g);
4599     }
4600   if (sctx.lane)
4601     {
4602       gimple *g
4603 	= gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 1, uid);
4604       gimple_call_set_lhs (g, sctx.lane);
4605       gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4606       gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
4607       g = gimple_build_assign (sctx.lane, INTEGER_CST,
4608 			       build_int_cst (unsigned_type_node, 0));
4609       gimple_seq_add_stmt (ilist, g);
4610       /* Emit reductions across SIMT lanes in log_2(simt_vf) steps.  */
4611       if (llist[2])
4612 	{
4613 	  tree simt_vf = create_tmp_var (unsigned_type_node);
4614 	  g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
4615 	  gimple_call_set_lhs (g, simt_vf);
4616 	  gimple_seq_add_stmt (dlist, g);
4617 
4618 	  tree t = build_int_cst (unsigned_type_node, 1);
4619 	  g = gimple_build_assign (simt_lane, INTEGER_CST, t);
4620 	  gimple_seq_add_stmt (dlist, g);
4621 
4622 	  t = build_int_cst (unsigned_type_node, 0);
4623 	  g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4624 	  gimple_seq_add_stmt (dlist, g);
4625 
4626 	  tree body = create_artificial_label (UNKNOWN_LOCATION);
4627 	  tree header = create_artificial_label (UNKNOWN_LOCATION);
4628 	  tree end = create_artificial_label (UNKNOWN_LOCATION);
4629 	  gimple_seq_add_stmt (dlist, gimple_build_goto (header));
4630 	  gimple_seq_add_stmt (dlist, gimple_build_label (body));
4631 
4632 	  gimple_seq_add_seq (dlist, llist[2]);
4633 
4634 	  g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane,
				   integer_one_node);
4635 	  gimple_seq_add_stmt (dlist, g);
4636 
4637 	  gimple_seq_add_stmt (dlist, gimple_build_label (header));
4638 	  g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
4639 	  gimple_seq_add_stmt (dlist, g);
4640 
4641 	  gimple_seq_add_stmt (dlist, gimple_build_label (end));
4642 	}
4643       for (int i = 0; i < 2; i++)
4644 	if (llist[i])
4645 	  {
4646 	    tree vf = create_tmp_var (unsigned_type_node);
4647 	    g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
4648 	    gimple_call_set_lhs (g, vf);
4649 	    gimple_seq *seq = i == 0 ? ilist : dlist;
4650 	    gimple_seq_add_stmt (seq, g);
4651 	    tree t = build_int_cst (unsigned_type_node, 0);
4652 	    g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4653 	    gimple_seq_add_stmt (seq, g);
4654 	    tree body = create_artificial_label (UNKNOWN_LOCATION);
4655 	    tree header = create_artificial_label (UNKNOWN_LOCATION);
4656 	    tree end = create_artificial_label (UNKNOWN_LOCATION);
4657 	    gimple_seq_add_stmt (seq, gimple_build_goto (header));
4658 	    gimple_seq_add_stmt (seq, gimple_build_label (body));
4659 	    gimple_seq_add_seq (seq, llist[i]);
4660 	    t = build_int_cst (unsigned_type_node, 1);
4661 	    g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
4662 	    gimple_seq_add_stmt (seq, g);
4663 	    gimple_seq_add_stmt (seq, gimple_build_label (header));
4664 	    g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
4665 	    gimple_seq_add_stmt (seq, g);
4666 	    gimple_seq_add_stmt (seq, gimple_build_label (end));
4667 	  }
4668     }
4669   if (sctx.is_simt)
4670     {
4671       gimple_seq_add_seq (dlist, sctx.simt_dlist);
4672       gimple *g
4673 	= gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
4674       gimple_seq_add_stmt (dlist, g);
4675     }
4676 
4677   /* The copyin sequence is not to be executed by the main thread, since
4678      that would result in self-copies.  For scalars a self-copy may be
4679      unobservable, but for C++ operator= it certainly is not.  */
4680   if (copyin_seq)
4681     {
4682       x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
4683 			   0);
4684       x = build2 (NE_EXPR, boolean_type_node, x,
4685 		  build_int_cst (TREE_TYPE (x), 0));
4686       x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
4687       gimplify_and_add (x, ilist);
4688     }
4689 
4690   /* If any copyin variable is passed by reference, we must ensure the
4691      master thread doesn't modify it before it is copied over in all
4692      threads.  Similarly, for variables in both firstprivate and
4693      lastprivate clauses, we need to ensure that the lastprivate copying
4694      happens after the firstprivate copying in all threads.  And likewise
4695      for UDRs whose initializer expression refers to omp_orig.  */
4696   if (copyin_by_ref || lastprivate_firstprivate || reduction_omp_orig_ref)
4697     {
4698       /* Don't add any barrier for #pragma omp simd or
4699 	 #pragma omp distribute.  */
4700       if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
4701 	  || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR)
4702 	gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
4703     }
4704 
4705   /* If max_vf is non-zero, then we can use only a vectorization factor
4706      up to the max_vf we chose.  So stick it into the safelen clause.  */
4707   if (sctx.max_vf)
4708     {
4709       tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4710 				OMP_CLAUSE_SAFELEN);
4711       if (c == NULL_TREE
4712 	  || (TREE_CODE (OMP_CLAUSE_SAFELEN_EXPR (c)) == INTEGER_CST
4713 	      && compare_tree_int (OMP_CLAUSE_SAFELEN_EXPR (c),
4714 				   sctx.max_vf) == 1))
4715 	{
4716 	  c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
4717 	  OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
4718 						       sctx.max_vf);
4719 	  OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4720 	  gimple_omp_for_set_clauses (ctx->stmt, c);
4721 	}
4722     }
4723 }
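
/* As an illustrative sketch of the above (details vary per clause), for

	#pragma omp parallel firstprivate(a)

   the receiver-side ILIST contains roughly

	a' = .omp_data_i->a;

   i.e. a copy-construction of the private A from the value marshalled
   by the sender, while DLIST receives the matching destructor call,
   if any.  */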
4724 
4725 
4726 /* Generate code to implement the LASTPRIVATE clauses.  This is used for
4727    both parallel and workshare constructs.  PREDICATE may be NULL if it's
4728    always true.   */
4729    always true.  */
4730 static void
4731 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *stmt_list,
4732 			   omp_context *ctx)
4733 {
4734   tree x, c, label = NULL, orig_clauses = clauses;
4735   bool par_clauses = false;
4736   tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
4737 
4738   /* Early exit if there are no lastprivate or linear clauses.  */
4739   for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
4740     if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
4741 	|| (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
4742 	    && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
4743       break;
4744   if (clauses == NULL)
4745     {
4746       /* If this was a workshare clause, see if it had been combined
4747 	 with its parallel.  In that case, look for the clauses on the
4748 	 parallel statement itself.  */
4749       if (is_parallel_ctx (ctx))
4750 	return;
4751 
4752       ctx = ctx->outer;
4753       if (ctx == NULL || !is_parallel_ctx (ctx))
4754 	return;
4755 
4756       clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4757 				 OMP_CLAUSE_LASTPRIVATE);
4758       if (clauses == NULL)
4759 	return;
4760       par_clauses = true;
4761     }
4762 
4763   bool maybe_simt = false;
4764   if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4765       && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
4766     {
4767       maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
4768       simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
4769       if (simduid)
4770 	simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
4771     }
4772 
4773   if (predicate)
4774     {
4775       gcond *stmt;
4776       tree label_true, arm1, arm2;
4777       enum tree_code pred_code = TREE_CODE (predicate);
4778 
4779       label = create_artificial_label (UNKNOWN_LOCATION);
4780       label_true = create_artificial_label (UNKNOWN_LOCATION);
4781       if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
4782 	{
4783 	  arm1 = TREE_OPERAND (predicate, 0);
4784 	  arm2 = TREE_OPERAND (predicate, 1);
4785 	  gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4786 	  gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
4787 	}
4788       else
4789 	{
4790 	  arm1 = predicate;
4791 	  gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4792 	  arm2 = boolean_false_node;
4793 	  pred_code = NE_EXPR;
4794 	}
4795       if (maybe_simt)
4796 	{
4797 	  c = build2 (pred_code, boolean_type_node, arm1, arm2);
4798 	  c = fold_convert (integer_type_node, c);
4799 	  simtcond = create_tmp_var (integer_type_node);
4800 	  gimplify_assign (simtcond, c, stmt_list);
4801 	  gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
4802 						 1, simtcond);
4803 	  c = create_tmp_var (integer_type_node);
4804 	  gimple_call_set_lhs (g, c);
4805 	  gimple_seq_add_stmt (stmt_list, g);
4806 	  stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
4807 				    label_true, label);
4808 	}
4809       else
4810 	stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
4811       gimple_seq_add_stmt (stmt_list, stmt);
4812       gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
4813     }
4814 
4815   for (c = clauses; c ;)
4816     {
4817       tree var, new_var;
4818       location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4819 
4820       if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4821 	  || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4822 	      && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
4823 	{
4824 	  var = OMP_CLAUSE_DECL (c);
4825 	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4826 	      && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
4827 	      && is_taskloop_ctx (ctx))
4828 	    {
4829 	      gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
4830 	      new_var = lookup_decl (var, ctx->outer);
4831 	    }
4832 	  else
4833 	    {
4834 	      new_var = lookup_decl (var, ctx);
4835 	      /* Avoid uninitialized warnings for lastprivate and
4836 		 for linear iterators.  */
4837 	      if (predicate
4838 		  && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4839 		      || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
4840 		TREE_NO_WARNING (new_var) = 1;
4841 	    }
4842 
4843 	  if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
4844 	    {
4845 	      tree val = DECL_VALUE_EXPR (new_var);
4846 	      if (TREE_CODE (val) == ARRAY_REF
4847 		  && VAR_P (TREE_OPERAND (val, 0))
4848 		  && lookup_attribute ("omp simd array",
4849 				       DECL_ATTRIBUTES (TREE_OPERAND (val,
4850 								      0))))
4851 		{
4852 		  if (lastlane == NULL)
4853 		    {
4854 		      lastlane = create_tmp_var (unsigned_type_node);
4855 		      gcall *g
4856 			= gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
4857 						      2, simduid,
4858 						      TREE_OPERAND (val, 1));
4859 		      gimple_call_set_lhs (g, lastlane);
4860 		      gimple_seq_add_stmt (stmt_list, g);
4861 		    }
4862 		  new_var = build4 (ARRAY_REF, TREE_TYPE (val),
4863 				    TREE_OPERAND (val, 0), lastlane,
4864 				    NULL_TREE, NULL_TREE);
4865 		}
4866 	    }
4867 	  else if (maybe_simt)
4868 	    {
4869 	      tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
4870 			  ? DECL_VALUE_EXPR (new_var)
4871 			  : new_var);
4872 	      if (simtlast == NULL)
4873 		{
4874 		  simtlast = create_tmp_var (unsigned_type_node);
4875 		  gcall *g = gimple_build_call_internal
4876 		    (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
4877 		  gimple_call_set_lhs (g, simtlast);
4878 		  gimple_seq_add_stmt (stmt_list, g);
4879 		}
4880 	      x = build_call_expr_internal_loc
4881 		(UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
4882 		 TREE_TYPE (val), 2, val, simtlast);
4883 	      new_var = unshare_expr (new_var);
4884 	      gimplify_assign (new_var, x, stmt_list);
4885 	      new_var = unshare_expr (new_var);
4886 	    }
4887 
4888 	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4889 	      && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
4890 	    {
4891 	      lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
4892 	      gimple_seq_add_seq (stmt_list,
4893 				  OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
4894 	      OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
4895 	    }
4896 	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4897 		   && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
4898 	    {
4899 	      lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
4900 	      gimple_seq_add_seq (stmt_list,
4901 				  OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
4902 	      OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
4903 	    }
4904 
4905 	  x = NULL_TREE;
4906 	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4907 	      && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
4908 	    {
4909 	      gcc_checking_assert (is_taskloop_ctx (ctx));
4910 	      tree ovar = maybe_lookup_decl_in_outer_ctx (var,
4911 							  ctx->outer->outer);
4912 	      if (is_global_var (ovar))
4913 		x = ovar;
4914 	    }
4915 	  if (!x)
4916 	    x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
4917 	  if (omp_is_reference (var))
4918 	    new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4919 	  x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
4920 	  gimplify_and_add (x, stmt_list);
4921 	}
4922       c = OMP_CLAUSE_CHAIN (c);
4923       if (c == NULL && !par_clauses)
4924 	{
4925 	  /* If this was a workshare clause, see if it had been combined
4926 	     with its parallel.  In that case, continue looking for the
4927 	     clauses on the parallel statement itself as well.  */
4928 	  if (is_parallel_ctx (ctx))
4929 	    break;
4930 
4931 	  ctx = ctx->outer;
4932 	  if (ctx == NULL || !is_parallel_ctx (ctx))
4933 	    break;
4934 
4935 	  c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4936 			       OMP_CLAUSE_LASTPRIVATE);
4937 	  par_clauses = true;
4938 	}
4939     }
4940 
4941   if (label)
4942     gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
4943 }
4944 
4945 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
4946    (which might be a placeholder).  INNER is true if this is an inner
4947    axis of a multi-axis loop.  FORK and JOIN are (optional) fork and
4948    join markers.  Generate the before-loop forking sequence in
4949    FORK_SEQ and the after-loop joining sequence to JOIN_SEQ.  The
4950    general form of these sequences is
4951 
4952      GOACC_REDUCTION_SETUP
4953      GOACC_FORK
4954      GOACC_REDUCTION_INIT
4955      ...
4956      GOACC_REDUCTION_FINI
4957      GOACC_JOIN
4958      GOACC_REDUCTION_TEARDOWN.  */
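/* As a rough sketch only (names illustrative), a reduction (+:sum)
   clause handled at this level therefore becomes

     v1 = GOACC_REDUCTION (SETUP, ref_to_res, incoming, level, +, off);
     GOACC_FORK
     v2 = GOACC_REDUCTION (INIT, ref_to_res, v1, level, +, off);
     ...
     v3 = GOACC_REDUCTION (FINI, ref_to_res, v2, level, +, off);
     GOACC_JOIN
     outgoing = GOACC_REDUCTION (TEARDOWN, ref_to_res, v3, level, +, off);

   matching the four internal-function calls built below.  */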
4959 
4960 static void
4961 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
4962 		       gcall *fork, gcall *join, gimple_seq *fork_seq,
4963 		       gimple_seq *join_seq, omp_context *ctx)
4964 {
4965   gimple_seq before_fork = NULL;
4966   gimple_seq after_fork = NULL;
4967   gimple_seq before_join = NULL;
4968   gimple_seq after_join = NULL;
4969   tree init_code = NULL_TREE, fini_code = NULL_TREE,
4970     setup_code = NULL_TREE, teardown_code = NULL_TREE;
4971   unsigned offset = 0;
4972 
4973   for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
4974     if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4975       {
4976 	tree orig = OMP_CLAUSE_DECL (c);
4977 	tree var = maybe_lookup_decl (orig, ctx);
4978 	tree ref_to_res = NULL_TREE;
4979 	tree incoming, outgoing, v1, v2, v3;
4980 	bool is_private = false;
4981 
4982 	enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
4983 	if (rcode == MINUS_EXPR)
4984 	  rcode = PLUS_EXPR;
4985 	else if (rcode == TRUTH_ANDIF_EXPR)
4986 	  rcode = BIT_AND_EXPR;
4987 	else if (rcode == TRUTH_ORIF_EXPR)
4988 	  rcode = BIT_IOR_EXPR;
4989 	tree op = build_int_cst (unsigned_type_node, rcode);
4990 
4991 	if (!var)
4992 	  var = orig;
4993 
4994 	incoming = outgoing = var;
4995 
4996 	if (!inner)
4997 	  {
4998 	    /* See if an outer construct also reduces this variable.  */
4999 	    omp_context *outer = ctx;
5000 
5001 	    while (omp_context *probe = outer->outer)
5002 	      {
5003 		enum gimple_code type = gimple_code (probe->stmt);
5004 		tree cls;
5005 
5006 		switch (type)
5007 		  {
5008 		  case GIMPLE_OMP_FOR:
5009 		    cls = gimple_omp_for_clauses (probe->stmt);
5010 		    break;
5011 
5012 		  case GIMPLE_OMP_TARGET:
5013 		    if (gimple_omp_target_kind (probe->stmt)
5014 			!= GF_OMP_TARGET_KIND_OACC_PARALLEL)
5015 		      goto do_lookup;
5016 
5017 		    cls = gimple_omp_target_clauses (probe->stmt);
5018 		    break;
5019 
5020 		  default:
5021 		    goto do_lookup;
5022 		  }
5023 
5024 		outer = probe;
5025 		for (; cls;  cls = OMP_CLAUSE_CHAIN (cls))
5026 		  if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
5027 		      && orig == OMP_CLAUSE_DECL (cls))
5028 		    {
5029 		      incoming = outgoing = lookup_decl (orig, probe);
5030 		      goto has_outer_reduction;
5031 		    }
5032 		  else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
5033 			    || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
5034 			   && orig == OMP_CLAUSE_DECL (cls))
5035 		    {
5036 		      is_private = true;
5037 		      goto do_lookup;
5038 		    }
5039 	      }
5040 
5041 	  do_lookup:
5042 	    /* This is the outermost construct with this reduction;
5043 	       see if there's a mapping for it.  */
5044 	    if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
5045 		&& maybe_lookup_field (orig, outer) && !is_private)
5046 	      {
5047 		ref_to_res = build_receiver_ref (orig, false, outer);
5048 		if (omp_is_reference (orig))
5049 		  ref_to_res = build_simple_mem_ref (ref_to_res);
5050 
5051 		tree type = TREE_TYPE (var);
5052 		if (POINTER_TYPE_P (type))
5053 		  type = TREE_TYPE (type);
5054 
5055 		outgoing = var;
5056 		incoming = omp_reduction_init_op (loc, rcode, type);
5057 	      }
5058 	    else
5059 	      {
5060 		/* Try to look at enclosing contexts for the reduction var;
5061 		   use the original if no mapping is found.  */
5062 		tree t = NULL_TREE;
5063 		omp_context *c = ctx->outer;
5064 		while (c && !t)
5065 		  {
5066 		    t = maybe_lookup_decl (orig, c);
5067 		    c = c->outer;
5068 		  }
5069 		incoming = outgoing = (t ? t : orig);
5070 	      }
5071 
5072 	  has_outer_reduction:;
5073 	  }
5074 
5075 	if (!ref_to_res)
5076 	  ref_to_res = integer_zero_node;
5077 
5078 	if (omp_is_reference (orig))
5079 	  {
5080 	    tree type = TREE_TYPE (var);
5081 	    const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
5082 
5083 	    if (!inner)
5084 	      {
5085 		tree x = create_tmp_var (TREE_TYPE (type), id);
5086 		gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
5087 	      }
5088 
5089 	    v1 = create_tmp_var (type, id);
5090 	    v2 = create_tmp_var (type, id);
5091 	    v3 = create_tmp_var (type, id);
5092 
5093 	    gimplify_assign (v1, var, fork_seq);
5094 	    gimplify_assign (v2, var, fork_seq);
5095 	    gimplify_assign (v3, var, fork_seq);
5096 
5097 	    var = build_simple_mem_ref (var);
5098 	    v1 = build_simple_mem_ref (v1);
5099 	    v2 = build_simple_mem_ref (v2);
5100 	    v3 = build_simple_mem_ref (v3);
5101 	    outgoing = build_simple_mem_ref (outgoing);
5102 
5103 	    if (!TREE_CONSTANT (incoming))
5104 	      incoming = build_simple_mem_ref (incoming);
5105 	  }
5106 	else
5107 	  v1 = v2 = v3 = var;
5108 
5109 	/* Determine position in reduction buffer, which may be used
5110 	   by the target.  */
5111 	enum machine_mode mode = TYPE_MODE (TREE_TYPE (var));
5112 	unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
5113 	offset = (offset + align - 1) & ~(align - 1);
5114 	tree off = build_int_cst (sizetype, offset);
5115 	offset += GET_MODE_SIZE (mode);
5116 
5117 	if (!init_code)
5118 	  {
5119 	    init_code = build_int_cst (integer_type_node,
5120 				       IFN_GOACC_REDUCTION_INIT);
5121 	    fini_code = build_int_cst (integer_type_node,
5122 				       IFN_GOACC_REDUCTION_FINI);
5123 	    setup_code = build_int_cst (integer_type_node,
5124 					IFN_GOACC_REDUCTION_SETUP);
5125 	    teardown_code = build_int_cst (integer_type_node,
5126 					   IFN_GOACC_REDUCTION_TEARDOWN);
5127 	  }
5128 
5129 	tree setup_call
5130 	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5131 					  TREE_TYPE (var), 6, setup_code,
5132 					  unshare_expr (ref_to_res),
5133 					  incoming, level, op, off);
5134 	tree init_call
5135 	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5136 					  TREE_TYPE (var), 6, init_code,
5137 					  unshare_expr (ref_to_res),
5138 					  v1, level, op, off);
5139 	tree fini_call
5140 	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5141 					  TREE_TYPE (var), 6, fini_code,
5142 					  unshare_expr (ref_to_res),
5143 					  v2, level, op, off);
5144 	tree teardown_call
5145 	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5146 					  TREE_TYPE (var), 6, teardown_code,
5147 					  ref_to_res, v3, level, op, off);
5148 
5149 	gimplify_assign (v1, setup_call, &before_fork);
5150 	gimplify_assign (v2, init_call, &after_fork);
5151 	gimplify_assign (v3, fini_call, &before_join);
5152 	gimplify_assign (outgoing, teardown_call, &after_join);
5153       }
5154 
5155   /* Now stitch things together.  */
5156   gimple_seq_add_seq (fork_seq, before_fork);
5157   if (fork)
5158     gimple_seq_add_stmt (fork_seq, fork);
5159   gimple_seq_add_seq (fork_seq, after_fork);
5160 
5161   gimple_seq_add_seq (join_seq, before_join);
5162   if (join)
5163     gimple_seq_add_stmt (join_seq, join);
5164   gimple_seq_add_seq (join_seq, after_join);
5165 }
5166 
5167 /* Generate code to implement the REDUCTION clauses.  */
5168 
5169 static void
5170 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
5171 {
5172   gimple_seq sub_seq = NULL;
5173   gimple *stmt;
5174   tree x, c;
5175   int count = 0;
5176 
5177   /* OpenACC loop reductions are handled elsewhere.  */
5178   if (is_gimple_omp_oacc (ctx->stmt))
5179     return;
5180 
5181   /* SIMD reductions are handled in lower_rec_input_clauses.  */
5182   if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
5183       && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
5184     return;
5185 
5186   /* First see if there is exactly one reduction clause.  Use OMP_ATOMIC
5187      update in that case, otherwise use a lock.  */
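  /* As a rough sketch (variable names illustrative): with a lone
     reduction (+:sum) clause the merge below is therefore emitted as

	#pragma omp atomic
	  *&sum_outer = *&sum_outer + sum_private;

     while with two or more clauses all the merges share one region:

	GOMP_atomic_start ();
	sum_outer = sum_outer + sum_private;
	prod_outer = prod_outer * prod_private;
	GOMP_atomic_end ();  */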
5188   for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
5189     if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
5190       {
5191 	if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5192 	    || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5193 	  {
5194 	    /* Never use OMP_ATOMIC for array reductions or UDRs.  */
5195 	    count = -1;
5196 	    break;
5197 	  }
5198 	count++;
5199       }
5200 
5201   if (count == 0)
5202     return;
5203 
5204   for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5205     {
5206       tree var, ref, new_var, orig_var;
5207       enum tree_code code;
5208       location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5209 
5210       if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5211 	continue;
5212 
5213       enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
5214       orig_var = var = OMP_CLAUSE_DECL (c);
5215       if (TREE_CODE (var) == MEM_REF)
5216 	{
5217 	  var = TREE_OPERAND (var, 0);
5218 	  if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5219 	    var = TREE_OPERAND (var, 0);
5220 	  if (TREE_CODE (var) == ADDR_EXPR)
5221 	    var = TREE_OPERAND (var, 0);
5222 	  else
5223 	    {
5224 	      /* If this is a pointer- or reference-based array
5225 		 section, the var could be private in the outer
5226 		 context, e.g. on an orphaned loop construct.  Pretend
5227 		 this is the private variable's outer reference.  */
5228 	      ccode = OMP_CLAUSE_PRIVATE;
5229 	      if (TREE_CODE (var) == INDIRECT_REF)
5230 		var = TREE_OPERAND (var, 0);
5231 	    }
5232 	  orig_var = var;
5233 	  if (is_variable_sized (var))
5234 	    {
5235 	      gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5236 	      var = DECL_VALUE_EXPR (var);
5237 	      gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5238 	      var = TREE_OPERAND (var, 0);
5239 	      gcc_assert (DECL_P (var));
5240 	    }
5241 	}
5242       new_var = lookup_decl (var, ctx);
5243       if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
5244 	new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5245       ref = build_outer_var_ref (var, ctx, ccode);
5246       code = OMP_CLAUSE_REDUCTION_CODE (c);
5247 
5248       /* reduction(-:var) sums up the partial results, so it acts
5249 	 identically to reduction(+:var).  */
5250       if (code == MINUS_EXPR)
5251         code = PLUS_EXPR;
5252 
5253       if (count == 1)
5254 	{
5255 	  tree addr = build_fold_addr_expr_loc (clause_loc, ref);
5256 
5257 	  addr = save_expr (addr);
5258 	  ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
5259 	  x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
5260 	  x = build2 (OMP_ATOMIC, void_type_node, addr, x);
5261 	  gimplify_and_add (x, stmt_seqp);
5262 	  return;
5263 	}
5264       else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5265 	{
5266 	  tree d = OMP_CLAUSE_DECL (c);
5267 	  tree type = TREE_TYPE (d);
5268 	  tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5269 	  tree i = create_tmp_var (TREE_TYPE (v), NULL);
5270 	  tree ptype = build_pointer_type (TREE_TYPE (type));
5271 	  tree bias = TREE_OPERAND (d, 1);
5272 	  d = TREE_OPERAND (d, 0);
5273 	  if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5274 	    {
5275 	      tree b = TREE_OPERAND (d, 1);
5276 	      b = maybe_lookup_decl (b, ctx);
5277 	      if (b == NULL)
5278 		{
5279 		  b = TREE_OPERAND (d, 1);
5280 		  b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5281 		}
5282 	      if (integer_zerop (bias))
5283 		bias = b;
5284 	      else
5285 		{
5286 		  bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
5287 		  bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5288 					  TREE_TYPE (b), b, bias);
5289 		}
5290 	      d = TREE_OPERAND (d, 0);
5291 	    }
5292 	  /* For ref, build_outer_var_ref already performs the dereference,
5293 	     so only new_var needs one.  */
5294 	  if (TREE_CODE (d) == INDIRECT_REF)
5295 	    {
5296 	      new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5297 	      gcc_assert (omp_is_reference (var) && var == orig_var);
5298 	    }
5299 	  else if (TREE_CODE (d) == ADDR_EXPR)
5300 	    {
5301 	      if (orig_var == var)
5302 		{
5303 		  new_var = build_fold_addr_expr (new_var);
5304 		  ref = build_fold_addr_expr (ref);
5305 		}
5306 	    }
5307 	  else
5308 	    {
5309 	      gcc_assert (orig_var == var);
5310 	      if (omp_is_reference (var))
5311 		ref = build_fold_addr_expr (ref);
5312 	    }
5313 	  if (DECL_P (v))
5314 	    {
5315 	      tree t = maybe_lookup_decl (v, ctx);
5316 	      if (t)
5317 		v = t;
5318 	      else
5319 		v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5320 	      gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
5321 	    }
5322 	  if (!integer_zerop (bias))
5323 	    {
5324 	      bias = fold_convert_loc (clause_loc, sizetype, bias);
5325 	      new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5326 					 TREE_TYPE (new_var), new_var,
5327 					 unshare_expr (bias));
5328 	      ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5329 					 TREE_TYPE (ref), ref, bias);
5330 	    }
5331 	  new_var = fold_convert_loc (clause_loc, ptype, new_var);
5332 	  ref = fold_convert_loc (clause_loc, ptype, ref);
5333 	  tree m = create_tmp_var (ptype, NULL);
5334 	  gimplify_assign (m, new_var, stmt_seqp);
5335 	  new_var = m;
5336 	  m = create_tmp_var (ptype, NULL);
5337 	  gimplify_assign (m, ref, stmt_seqp);
5338 	  ref = m;
5339 	  gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
5340 	  tree body = create_artificial_label (UNKNOWN_LOCATION);
5341 	  tree end = create_artificial_label (UNKNOWN_LOCATION);
5342 	  gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
5343 	  tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
5344 	  tree out = build_simple_mem_ref_loc (clause_loc, ref);
5345 	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5346 	    {
5347 	      tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5348 	      tree decl_placeholder
5349 		= OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5350 	      SET_DECL_VALUE_EXPR (placeholder, out);
5351 	      DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5352 	      SET_DECL_VALUE_EXPR (decl_placeholder, priv);
5353 	      DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5354 	      lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5355 	      gimple_seq_add_seq (&sub_seq,
5356 				  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5357 	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5358 	      OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5359 	      OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
5360 	    }
5361 	  else
5362 	    {
5363 	      x = build2 (code, TREE_TYPE (out), out, priv);
5364 	      out = unshare_expr (out);
5365 	      gimplify_assign (out, x, &sub_seq);
5366 	    }
5367 	  gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
5368 					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
5369 	  gimple_seq_add_stmt (&sub_seq, g);
5370 	  g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
5371 				   TYPE_SIZE_UNIT (TREE_TYPE (type)));
5372 	  gimple_seq_add_stmt (&sub_seq, g);
5373 	  g = gimple_build_assign (i, PLUS_EXPR, i,
5374 				   build_int_cst (TREE_TYPE (i), 1));
5375 	  gimple_seq_add_stmt (&sub_seq, g);
5376 	  g = gimple_build_cond (LE_EXPR, i, v, body, end);
5377 	  gimple_seq_add_stmt (&sub_seq, g);
5378 	  gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
5379 	}
5380       else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5381 	{
5382 	  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5383 
5384 	  if (omp_is_reference (var)
5385 	      && !useless_type_conversion_p (TREE_TYPE (placeholder),
5386 					     TREE_TYPE (ref)))
5387 	    ref = build_fold_addr_expr_loc (clause_loc, ref);
5388 	  SET_DECL_VALUE_EXPR (placeholder, ref);
5389 	  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5390 	  lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5391 	  gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5392 	  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5393 	  OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5394 	}
5395       else
5396 	{
5397 	  x = build2 (code, TREE_TYPE (ref), ref, new_var);
5398 	  ref = build_outer_var_ref (var, ctx);
5399 	  gimplify_assign (ref, x, &sub_seq);
5400 	}
5401     }
5402 
5403   stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
5404 			    0);
5405   gimple_seq_add_stmt (stmt_seqp, stmt);
5406 
5407   gimple_seq_add_seq (stmt_seqp, sub_seq);
5408 
5409   stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
5410 			    0);
5411   gimple_seq_add_stmt (stmt_seqp, stmt);
5412 }
5413 
5414 
5415 /* Generate code to implement the COPYPRIVATE clauses.  */
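/* A rough sketch of the generated code (member names follow the
   variables): for each copyprivate variable 'a', the sending side
   (SLIST) gets

     .omp_copy_o.a = a;		(or &a when passed by reference)

   and the receiving side (RLIST) gets

     a = .omp_copy_i->a;

   using the sender/receiver records set up by lower_omp_single_copy.  */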
5416 
5417 static void
5418 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
5419 			    omp_context *ctx)
5420 {
5421   tree c;
5422 
5423   for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5424     {
5425       tree var, new_var, ref, x;
5426       bool by_ref;
5427       location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5428 
5429       if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
5430 	continue;
5431 
5432       var = OMP_CLAUSE_DECL (c);
5433       by_ref = use_pointer_for_field (var, NULL);
5434 
5435       ref = build_sender_ref (var, ctx);
5436       x = new_var = lookup_decl_in_outer_ctx (var, ctx);
5437       if (by_ref)
5438 	{
5439 	  x = build_fold_addr_expr_loc (clause_loc, new_var);
5440 	  x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
5441 	}
5442       gimplify_assign (ref, x, slist);
5443 
5444       ref = build_receiver_ref (var, false, ctx);
5445       if (by_ref)
5446 	{
5447 	  ref = fold_convert_loc (clause_loc,
5448 				  build_pointer_type (TREE_TYPE (new_var)),
5449 				  ref);
5450 	  ref = build_fold_indirect_ref_loc (clause_loc, ref);
5451 	}
5452       if (omp_is_reference (var))
5453 	{
5454 	  ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
5455 	  ref = build_simple_mem_ref_loc (clause_loc, ref);
5456 	  new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5457 	}
5458       x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
5459       gimplify_and_add (x, rlist);
5460     }
5461 }
5462 
5463 
5464 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
5465    and REDUCTION from the sender (aka parent) side.  */
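/* As a rough sketch (record and field names illustrative): a
   firstprivate 'x' adds to ILIST

     .omp_data_o.x = x;		(or &x when use_pointer_for_field)

   while a lastprivate 'x' adds the reverse copy

     x = .omp_data_o.x;

   to OLIST, to run after the region completes.  */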
5466 
5467 static void
5468 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
5469     		    omp_context *ctx)
5470 {
5471   tree c, t;
5472   int ignored_looptemp = 0;
5473   bool is_taskloop = false;
5474 
5475   /* For taskloop, ignore the first two _looptemp_ clauses; those are initialized
5476      by GOMP_taskloop.  */
5477   if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
5478     {
5479       ignored_looptemp = 2;
5480       is_taskloop = true;
5481     }
5482 
5483   for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5484     {
5485       tree val, ref, x, var;
5486       bool by_ref, do_in = false, do_out = false;
5487       location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5488 
5489       switch (OMP_CLAUSE_CODE (c))
5490 	{
5491 	case OMP_CLAUSE_PRIVATE:
5492 	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5493 	    break;
5494 	  continue;
5495 	case OMP_CLAUSE_FIRSTPRIVATE:
5496 	case OMP_CLAUSE_COPYIN:
5497 	case OMP_CLAUSE_LASTPRIVATE:
5498 	case OMP_CLAUSE_REDUCTION:
5499 	  break;
5500 	case OMP_CLAUSE_SHARED:
5501 	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5502 	    break;
5503 	  continue;
5504 	case OMP_CLAUSE__LOOPTEMP_:
5505 	  if (ignored_looptemp)
5506 	    {
5507 	      ignored_looptemp--;
5508 	      continue;
5509 	    }
5510 	  break;
5511 	default:
5512 	  continue;
5513 	}
5514 
5515       val = OMP_CLAUSE_DECL (c);
5516       if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5517 	  && TREE_CODE (val) == MEM_REF)
5518 	{
5519 	  val = TREE_OPERAND (val, 0);
5520 	  if (TREE_CODE (val) == POINTER_PLUS_EXPR)
5521 	    val = TREE_OPERAND (val, 0);
5522 	  if (TREE_CODE (val) == INDIRECT_REF
5523 	      || TREE_CODE (val) == ADDR_EXPR)
5524 	    val = TREE_OPERAND (val, 0);
5525 	  if (is_variable_sized (val))
5526 	    continue;
5527 	}
5528 
5529       /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
5530 	 outer taskloop region.  */
5531       omp_context *ctx_for_o = ctx;
5532       if (is_taskloop
5533 	  && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
5534 	  && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5535 	ctx_for_o = ctx->outer;
5536 
5537       var = lookup_decl_in_outer_ctx (val, ctx_for_o);
5538 
5539       if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
5540 	  && is_global_var (var))
5541 	continue;
5542 
5543       t = omp_member_access_dummy_var (var);
5544       if (t)
5545 	{
5546 	  var = DECL_VALUE_EXPR (var);
5547 	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
5548 	  if (o != t)
5549 	    var = unshare_and_remap (var, t, o);
5550 	  else
5551 	    var = unshare_expr (var);
5552 	}
5553 
5554       if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
5555 	{
5556 	  /* Handle taskloop firstprivate/lastprivate, where the
5557 	     lastprivate on GIMPLE_OMP_TASK is represented as
5558 	     OMP_CLAUSE_SHARED_FIRSTPRIVATE.  */
5559 	  tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
5560 	  x = omp_build_component_ref (ctx->sender_decl, f);
5561 	  if (use_pointer_for_field (val, ctx))
5562 	    var = build_fold_addr_expr (var);
5563 	  gimplify_assign (x, var, ilist);
5564 	  DECL_ABSTRACT_ORIGIN (f) = NULL;
5565 	  continue;
5566 	}
5567 
5568       if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
5569 	   || val == OMP_CLAUSE_DECL (c))
5570 	  && is_variable_sized (val))
5571 	continue;
5572       by_ref = use_pointer_for_field (val, NULL);
5573 
5574       switch (OMP_CLAUSE_CODE (c))
5575 	{
5576 	case OMP_CLAUSE_FIRSTPRIVATE:
5577 	  if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
5578 	      && !by_ref
5579 	      && is_task_ctx (ctx))
5580 	    TREE_NO_WARNING (var) = 1;
5581 	  do_in = true;
5582 	  break;
5583 
5584 	case OMP_CLAUSE_PRIVATE:
5585 	case OMP_CLAUSE_COPYIN:
5586 	case OMP_CLAUSE__LOOPTEMP_:
5587 	  do_in = true;
5588 	  break;
5589 
5590 	case OMP_CLAUSE_LASTPRIVATE:
5591 	  if (by_ref || omp_is_reference (val))
5592 	    {
5593 	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5594 		continue;
5595 	      do_in = true;
5596 	    }
5597 	  else
5598 	    {
5599 	      do_out = true;
5600 	      if (lang_hooks.decls.omp_private_outer_ref (val))
5601 		do_in = true;
5602 	    }
5603 	  break;
5604 
5605 	case OMP_CLAUSE_REDUCTION:
5606 	  do_in = true;
5607 	  if (val == OMP_CLAUSE_DECL (c))
5608 	    do_out = !(by_ref || omp_is_reference (val));
5609 	  else
5610 	    by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
5611 	  break;
5612 
5613 	default:
5614 	  gcc_unreachable ();
5615 	}
5616 
5617       if (do_in)
5618 	{
5619 	  ref = build_sender_ref (val, ctx);
5620 	  x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
5621 	  gimplify_assign (ref, x, ilist);
5622 	  if (is_task_ctx (ctx))
5623 	    DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
5624 	}
5625 
5626       if (do_out)
5627 	{
5628 	  ref = build_sender_ref (val, ctx);
5629 	  gimplify_assign (var, ref, olist);
5630 	}
5631     }
5632 }
5633 
5634 /* Generate code to implement SHARED from the sender (aka parent)
5635    side.  This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
5636    list things that got automatically shared.  */
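/* A sketch of the copies generated here (record name illustrative):
   for an automatically shared 'x' this emits

     .omp_data_o.x = x;		(or &x when passed by reference)

   into ILIST and, unless 'x' is read-only or a by-reference
   result/parm decl, the back-copy

     x = .omp_data_o.x;

   into OLIST.  */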
5637 
5638 static void
5639 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
5640 {
5641   tree var, ovar, nvar, t, f, x, record_type;
5642 
5643   if (ctx->record_type == NULL)
5644     return;
5645 
5646   record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
5647   for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
5648     {
5649       ovar = DECL_ABSTRACT_ORIGIN (f);
5650       if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
5651 	continue;
5652 
5653       nvar = maybe_lookup_decl (ovar, ctx);
5654       if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
5655 	continue;
5656 
5657       /* If CTX is a nested parallel directive, find the immediately
5658 	 enclosing parallel or workshare construct that contains a
5659 	 mapping for OVAR.  */
5660       var = lookup_decl_in_outer_ctx (ovar, ctx);
5661 
5662       t = omp_member_access_dummy_var (var);
5663       if (t)
5664 	{
5665 	  var = DECL_VALUE_EXPR (var);
5666 	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
5667 	  if (o != t)
5668 	    var = unshare_and_remap (var, t, o);
5669 	  else
5670 	    var = unshare_expr (var);
5671 	}
5672 
5673       if (use_pointer_for_field (ovar, ctx))
5674 	{
5675 	  x = build_sender_ref (ovar, ctx);
5676 	  var = build_fold_addr_expr (var);
5677 	  gimplify_assign (x, var, ilist);
5678 	}
5679       else
5680 	{
5681 	  x = build_sender_ref (ovar, ctx);
5682 	  gimplify_assign (x, var, ilist);
5683 
5684 	  if (!TREE_READONLY (var)
5685 	      /* We don't need to receive a new reference to a result
5686 	         or parm decl.  In fact we may not store to it as we will
5687 		 invalidate any pending RSO and generate wrong gimple
5688 		 during inlining.  */
5689 	      && !((TREE_CODE (var) == RESULT_DECL
5690 		    || TREE_CODE (var) == PARM_DECL)
5691 		   && DECL_BY_REFERENCE (var)))
5692 	    {
5693 	      x = build_sender_ref (ovar, ctx);
5694 	      gimplify_assign (var, x, olist);
5695 	    }
5696 	}
5697     }
5698 }
5699 
5700 /* Emit an OpenACC head marker call, encapsulating the partitioning and
5701    other information that must be processed by the target compiler.
5702    Return the maximum number of dimensions the associated loop might
5703    be partitioned over.  */
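/* For example (a rough sketch), '#pragma acc loop gang vector' leads
   to a call of the form

     .data_dep = UNIQUE (OACC_HEAD_MARK, .data_dep, 2, tag);

   where 2 is the number of partitioning levels and tag has at least
   the OLF_DIM_GANG and OLF_DIM_VECTOR bits set; a gang-static
   argument is appended when static:<expr> was given.  */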
5704 
5705 static unsigned
5706 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
5707 		      gimple_seq *seq, omp_context *ctx)
5708 {
5709   unsigned levels = 0;
5710   unsigned tag = 0;
5711   tree gang_static = NULL_TREE;
5712   auto_vec<tree, 5> args;
5713 
5714   args.quick_push (build_int_cst
5715 		   (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
5716   args.quick_push (ddvar);
5717   for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
5718     {
5719       switch (OMP_CLAUSE_CODE (c))
5720 	{
5721 	case OMP_CLAUSE_GANG:
5722 	  tag |= OLF_DIM_GANG;
5723 	  gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
5724 	  /* static:* is represented by -1, and we can ignore it, as
5725 	     scheduling is always static.  */
5726 	  if (gang_static && integer_minus_onep (gang_static))
5727 	    gang_static = NULL_TREE;
5728 	  levels++;
5729 	  break;
5730 
5731 	case OMP_CLAUSE_WORKER:
5732 	  tag |= OLF_DIM_WORKER;
5733 	  levels++;
5734 	  break;
5735 
5736 	case OMP_CLAUSE_VECTOR:
5737 	  tag |= OLF_DIM_VECTOR;
5738 	  levels++;
5739 	  break;
5740 
5741 	case OMP_CLAUSE_SEQ:
5742 	  tag |= OLF_SEQ;
5743 	  break;
5744 
5745 	case OMP_CLAUSE_AUTO:
5746 	  tag |= OLF_AUTO;
5747 	  break;
5748 
5749 	case OMP_CLAUSE_INDEPENDENT:
5750 	  tag |= OLF_INDEPENDENT;
5751 	  break;
5752 
5753 	case OMP_CLAUSE_TILE:
5754 	  tag |= OLF_TILE;
5755 	  break;
5756 
5757 	default:
5758 	  continue;
5759 	}
5760     }
5761 
5762   if (gang_static)
5763     {
5764       if (DECL_P (gang_static))
5765 	gang_static = build_outer_var_ref (gang_static, ctx);
5766       tag |= OLF_GANG_STATIC;
5767     }
5768 
5769   /* In a parallel region, loops are implicitly INDEPENDENT.  */
5770   omp_context *tgt = enclosing_target_ctx (ctx);
5771   if (!tgt || is_oacc_parallel (tgt))
5772     tag |= OLF_INDEPENDENT;
5773 
5774   if (tag & OLF_TILE)
5775     /* Tiling could use all 3 levels.  */
5776     levels = 3;
5777   else
5778     {
5779       /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
5780 	 Ensure at least one level, or 2 for possible auto
5781 	 partitioning.  */
5782       bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
5783 				  << OLF_DIM_BASE) | OLF_SEQ));
5784 
5785       if (levels < 1u + maybe_auto)
5786 	levels = 1u + maybe_auto;
5787     }
5788 
5789   args.quick_push (build_int_cst (integer_type_node, levels));
5790   args.quick_push (build_int_cst (integer_type_node, tag));
5791   if (gang_static)
5792     args.quick_push (gang_static);
5793 
5794   gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
5795   gimple_set_location (call, loc);
5796   gimple_set_lhs (call, ddvar);
5797   gimple_seq_add_stmt (seq, call);
5798 
5799   return levels;
5800 }
5801 
5802 /* Emit an OpenACC loop head or tail marker to SEQ.  LEVEL is the
5803    partitioning level of the enclosed region.  */
5804 
5805 static void
5806 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
5807 			tree tofollow, gimple_seq *seq)
5808 {
5809   int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
5810 		     : IFN_UNIQUE_OACC_TAIL_MARK);
5811   tree marker = build_int_cst (integer_type_node, marker_kind);
5812   int nargs = 2 + (tofollow != NULL_TREE);
5813   gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
5814 					    marker, ddvar, tofollow);
5815   gimple_set_location (call, loc);
5816   gimple_set_lhs (call, ddvar);
5817   gimple_seq_add_stmt (seq, call);
5818 }
5819 
5820 /* Generate the before and after OpenACC loop sequences.  CLAUSES are
5821    the loop clauses, from which we extract reductions.  Initialize
5822    HEAD and TAIL.  */
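/* As a rough sketch, for a loop partitioned over two levels HEAD
   accumulates

     UNIQUE (OACC_HEAD_MARK, ...)	outermost level
     <reduction setups>  UNIQUE (OACC_FORK)  <reduction inits>
     UNIQUE (OACC_HEAD_MARK, ...)	inner level
     ...

   while TAIL collects the matching OACC_TAIL_MARK, reduction finis
   and OACC_JOIN calls in reverse order, because each level's join
   sequence is prepended to it.  */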
5823 
5824 static void
5825 lower_oacc_head_tail (location_t loc, tree clauses,
5826 		      gimple_seq *head, gimple_seq *tail, omp_context *ctx)
5827 {
5828   bool inner = false;
5829   tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
5830   gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
5831 
5832   unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
5833   tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
5834   tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
5835 
5836   gcc_assert (count);
5837   for (unsigned done = 1; count; count--, done++)
5838     {
5839       gimple_seq fork_seq = NULL;
5840       gimple_seq join_seq = NULL;
5841 
5842       tree place = build_int_cst (integer_type_node, -1);
5843       gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
5844 						fork_kind, ddvar, place);
5845       gimple_set_location (fork, loc);
5846       gimple_set_lhs (fork, ddvar);
5847 
5848       gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
5849 						join_kind, ddvar, place);
5850       gimple_set_location (join, loc);
5851       gimple_set_lhs (join, ddvar);
5852 
5853       /* Mark the beginning of this level sequence.  */
5854       if (inner)
5855 	lower_oacc_loop_marker (loc, ddvar, true,
5856 				build_int_cst (integer_type_node, count),
5857 				&fork_seq);
5858       lower_oacc_loop_marker (loc, ddvar, false,
5859 			      build_int_cst (integer_type_node, done),
5860 			      &join_seq);
5861 
5862       lower_oacc_reductions (loc, clauses, place, inner,
5863 			     fork, join, &fork_seq, &join_seq, ctx);
5864 
5865       /* Append this level to head.  */
5866       gimple_seq_add_seq (head, fork_seq);
5867       /* Prepend it to tail.  */
5868       gimple_seq_add_seq (&join_seq, *tail);
5869       *tail = join_seq;
5870 
5871       inner = true;
5872     }
5873 
5874   /* Mark the end of the sequence.  */
5875   lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
5876   lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
5877 }
5878 
5879 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
5880    catch handler and return it.  This prevents programs from violating the
5881    structured block semantics with throws.  */
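/* A sketch of the wrapper built below:

     try
       {
	 BODY;
       }
     catch
       {
	 <MUST_NOT_THROW: the language's cleanup action, e.g.
	  terminate (), or else __builtin_trap ()>
       }

   so any exception escaping BODY aborts the program instead of
   leaving the structured block.  */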
5882 
5883 static gimple_seq
5884 maybe_catch_exception (gimple_seq body)
5885 {
5886   gimple *g;
5887   tree decl;
5888 
5889   if (!flag_exceptions)
5890     return body;
5891 
5892   if (lang_hooks.eh_protect_cleanup_actions != NULL)
5893     decl = lang_hooks.eh_protect_cleanup_actions ();
5894   else
5895     decl = builtin_decl_explicit (BUILT_IN_TRAP);
5896 
5897   g = gimple_build_eh_must_not_throw (decl);
5898   g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
5899       			GIMPLE_TRY_CATCH);
5900 
5901   return gimple_seq_alloc_with_stmt (g);
5902 }
5903 
5904 
5905 /* Routines to lower OMP directives into OMP-GIMPLE.  */
5906 
5907 /* If ctx is a worksharing context inside of a cancellable parallel
5908    region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
5909    and conditional branch to parallel's cancel_label to handle
5910    cancellation in the implicit barrier.  */
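/* A sketch of the emitted form (label names illustrative):

     lhs = GIMPLE_OMP_RETURN;			<- cancelling barrier
     if (lhs != 0) goto cancel_label; else goto fallthru;
     fallthru:

   so a cancelled parallel branches from the implicit barrier straight
   to its cancellation label.  */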
5911 
5912 static void
5913 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple_seq *body)
5914 {
5915   gimple *omp_return = gimple_seq_last_stmt (*body);
5916   gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
5917   if (gimple_omp_return_nowait_p (omp_return))
5918     return;
5919   if (ctx->outer
5920       && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_PARALLEL
5921       && ctx->outer->cancellable)
5922     {
5923       tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
5924       tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
5925       tree lhs = create_tmp_var (c_bool_type);
5926       gimple_omp_return_set_lhs (omp_return, lhs);
5927       tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
5928       gimple *g = gimple_build_cond (NE_EXPR, lhs,
5929 				    fold_convert (c_bool_type,
5930 						  boolean_false_node),
5931 				    ctx->outer->cancel_label, fallthru_label);
5932       gimple_seq_add_stmt (body, g);
5933       gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
5934     }
5935 }
5936 
5937 /* Lower the OpenMP sections directive in the current statement in GSI_P.
5938    CTX is the enclosing OMP context for the current statement.  */
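/* The overall shape assembled below is, as a rough sketch:

     <ilist: privatization/initialization code>
     GIMPLE_OMP_SECTIONS <clauses, control var .section>
     GIMPLE_OMP_SECTIONS_SWITCH
     bind { <section 1 body> OMP_RETURN ... <lastprivate> OMP_RETURN }
     GIMPLE_OMP_CONTINUE (.section, .section)
     <olist: reductions>  <dlist: destructors>
     GIMPLE_OMP_RETURN [nowait]  */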
5939 
5940 static void
5941 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
5942 {
5943   tree block, control;
5944   gimple_stmt_iterator tgsi;
5945   gomp_sections *stmt;
5946   gimple *t;
5947   gbind *new_stmt, *bind;
5948   gimple_seq ilist, dlist, olist, new_body;
5949 
5950   stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
5951 
5952   push_gimplify_context ();
5953 
5954   dlist = NULL;
5955   ilist = NULL;
5956   lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
5957       			   &ilist, &dlist, ctx, NULL);
5958 
5959   new_body = gimple_omp_body (stmt);
5960   gimple_omp_set_body (stmt, NULL);
5961   tgsi = gsi_start (new_body);
5962   for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
5963     {
5964       omp_context *sctx;
5965       gimple *sec_start;
5966 
5967       sec_start = gsi_stmt (tgsi);
5968       sctx = maybe_lookup_ctx (sec_start);
5969       gcc_assert (sctx);
5970 
5971       lower_omp (gimple_omp_body_ptr (sec_start), sctx);
5972       gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
5973 			    GSI_CONTINUE_LINKING);
5974       gimple_omp_set_body (sec_start, NULL);
5975 
5976       if (gsi_one_before_end_p (tgsi))
5977 	{
5978 	  gimple_seq l = NULL;
5979 	  lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
5980 				     &l, ctx);
5981 	  gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
5982 	  gimple_omp_section_set_last (sec_start);
5983 	}
5984 
5985       gsi_insert_after (&tgsi, gimple_build_omp_return (false),
5986 			GSI_CONTINUE_LINKING);
5987     }
5988 
5989   block = make_node (BLOCK);
5990   bind = gimple_build_bind (NULL, new_body, block);
5991 
5992   olist = NULL;
5993   lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist, ctx);
5994 
5995   block = make_node (BLOCK);
5996   new_stmt = gimple_build_bind (NULL, NULL, block);
5997   gsi_replace (gsi_p, new_stmt, true);
5998 
5999   pop_gimplify_context (new_stmt);
6000   gimple_bind_append_vars (new_stmt, ctx->block_vars);
6001   BLOCK_VARS (block) = gimple_bind_vars (bind);
6002   if (BLOCK_VARS (block))
6003     TREE_USED (block) = 1;
6004 
6005   new_body = NULL;
6006   gimple_seq_add_seq (&new_body, ilist);
6007   gimple_seq_add_stmt (&new_body, stmt);
6008   gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
6009   gimple_seq_add_stmt (&new_body, bind);
6010 
6011   control = create_tmp_var (unsigned_type_node, ".section");
6012   t = gimple_build_omp_continue (control, control);
6013   gimple_omp_sections_set_control (stmt, control);
6014   gimple_seq_add_stmt (&new_body, t);
6015 
6016   gimple_seq_add_seq (&new_body, olist);
6017   if (ctx->cancellable)
6018     gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
6019   gimple_seq_add_seq (&new_body, dlist);
6020 
6021   new_body = maybe_catch_exception (new_body);
6022 
6023   bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
6024 				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
6025   t = gimple_build_omp_return (nowait);
6026   gimple_seq_add_stmt (&new_body, t);
6027   maybe_add_implicit_barrier_cancel (ctx, &new_body);
6028 
6029   gimple_bind_set_body (new_stmt, new_body);
6030 }
6031 
6032 
6033 /* A subroutine of lower_omp_single.  Expand the simple form of
6034    a GIMPLE_OMP_SINGLE, without a copyprivate clause:
6035 
6036      	if (GOMP_single_start ())
6037 	  BODY;
6038 	[ GOMP_barrier (); ]	-> unless 'nowait' is present.
6039 
6040   FIXME.  It may be better to delay expanding the logic of this until
6041   pass_expand_omp.  The expanded logic may make the job more difficult
6042   for a synchronization analysis pass.  */
6043 
6044 static void
6045 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
6046 {
6047   location_t loc = gimple_location (single_stmt);
6048   tree tlabel = create_artificial_label (loc);
6049   tree flabel = create_artificial_label (loc);
6050   gimple *call, *cond;
6051   tree lhs, decl;
6052 
6053   decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
6054   lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
6055   call = gimple_build_call (decl, 0);
6056   gimple_call_set_lhs (call, lhs);
6057   gimple_seq_add_stmt (pre_p, call);
6058 
6059   cond = gimple_build_cond (EQ_EXPR, lhs,
6060 			    fold_convert_loc (loc, TREE_TYPE (lhs),
6061 					      boolean_true_node),
6062 			    tlabel, flabel);
6063   gimple_seq_add_stmt (pre_p, cond);
6064   gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
6065   gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
6066   gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
6067 }
6068 
6069 
6070 /* A subroutine of lower_omp_single.  Expand the form of
6071    a GIMPLE_OMP_SINGLE that has a copyprivate clause:
6072 
6073 	#pragma omp single copyprivate (a, b, c)
6074 
6075    Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
6076 
6077       {
6078 	if ((copyout_p = GOMP_single_copy_start ()) == NULL)
6079 	  {
6080 	    BODY;
6081 	    copyout.a = a;
6082 	    copyout.b = b;
6083 	    copyout.c = c;
6084 	    GOMP_single_copy_end (&copyout);
6085 	  }
6086 	else
6087 	  {
6088 	    a = copyout_p->a;
6089 	    b = copyout_p->b;
6090 	    c = copyout_p->c;
6091 	  }
6092 	GOMP_barrier ();
6093       }
6094 
6095   FIXME.  It may be better to delay expanding the logic of this until
6096   pass_expand_omp.  The expanded logic may make the job more difficult
6097   for a synchronization analysis pass.  */
6098 
6099 static void
6100 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
6101 		       omp_context *ctx)
6102 {
6103   tree ptr_type, t, l0, l1, l2, bfn_decl;
6104   gimple_seq copyin_seq;
6105   location_t loc = gimple_location (single_stmt);
6106 
6107   ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
6108 
6109   ptr_type = build_pointer_type (ctx->record_type);
6110   ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
6111 
6112   l0 = create_artificial_label (loc);
6113   l1 = create_artificial_label (loc);
6114   l2 = create_artificial_label (loc);
6115 
6116   bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
6117   t = build_call_expr_loc (loc, bfn_decl, 0);
6118   t = fold_convert_loc (loc, ptr_type, t);
6119   gimplify_assign (ctx->receiver_decl, t, pre_p);
6120 
6121   t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
6122 	      build_int_cst (ptr_type, 0));
6123   t = build3 (COND_EXPR, void_type_node, t,
6124 	      build_and_jump (&l0), build_and_jump (&l1));
6125   gimplify_and_add (t, pre_p);
6126 
6127   gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
6128 
6129   gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
6130 
6131   copyin_seq = NULL;
6132   lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
6133 			      &copyin_seq, ctx);
6134 
6135   t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
6136   bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
6137   t = build_call_expr_loc (loc, bfn_decl, 1, t);
6138   gimplify_and_add (t, pre_p);
6139 
6140   t = build_and_jump (&l2);
6141   gimplify_and_add (t, pre_p);
6142 
6143   gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
6144 
6145   gimple_seq_add_seq (pre_p, copyin_seq);
6146 
6147   gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
6148 }
6149 
6150 
6151 /* Expand code for an OpenMP single directive.  */
6152 
6153 static void
6154 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6155 {
6156   tree block;
6157   gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
6158   gbind *bind;
6159   gimple_seq bind_body, bind_body_tail = NULL, dlist;
6160 
6161   push_gimplify_context ();
6162 
6163   block = make_node (BLOCK);
6164   bind = gimple_build_bind (NULL, NULL, block);
6165   gsi_replace (gsi_p, bind, true);
6166   bind_body = NULL;
6167   dlist = NULL;
6168   lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
6169 			   &bind_body, &dlist, ctx, NULL);
6170   lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
6171 
6172   gimple_seq_add_stmt (&bind_body, single_stmt);
6173 
6174   if (ctx->record_type)
6175     lower_omp_single_copy (single_stmt, &bind_body, ctx);
6176   else
6177     lower_omp_single_simple (single_stmt, &bind_body);
6178 
6179   gimple_omp_set_body (single_stmt, NULL);
6180 
6181   gimple_seq_add_seq (&bind_body, dlist);
6182 
6183   bind_body = maybe_catch_exception (bind_body);
6184 
6185   bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
6186 				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
6187   gimple *g = gimple_build_omp_return (nowait);
6188   gimple_seq_add_stmt (&bind_body_tail, g);
6189   maybe_add_implicit_barrier_cancel (ctx, &bind_body_tail);
6190   if (ctx->record_type)
6191     {
6192       gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
6193       tree clobber = build_constructor (ctx->record_type, NULL);
6194       TREE_THIS_VOLATILE (clobber) = 1;
6195       gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
6196 						   clobber), GSI_SAME_STMT);
6197     }
6198   gimple_seq_add_seq (&bind_body, bind_body_tail);
6199   gimple_bind_set_body (bind, bind_body);
6200 
6201   pop_gimplify_context (bind);
6202 
6203   gimple_bind_append_vars (bind, ctx->block_vars);
6204   BLOCK_VARS (block) = ctx->block_vars;
6205   if (BLOCK_VARS (block))
6206     TREE_USED (block) = 1;
6207 }
6208 
6209 
6210 /* Expand code for an OpenMP master directive.  */
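/* A sketch of the expansion built below (label name illustrative):

     if (omp_get_thread_num () != 0) goto skip;
     BODY;
     skip:
     GIMPLE_OMP_RETURN (nowait)

   i.e. only the master thread, thread number 0, executes BODY.  */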
6211 
6212 static void
6213 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6214 {
6215   tree block, lab = NULL, x, bfn_decl;
6216   gimple *stmt = gsi_stmt (*gsi_p);
6217   gbind *bind;
6218   location_t loc = gimple_location (stmt);
6219   gimple_seq tseq;
6220 
6221   push_gimplify_context ();
6222 
6223   block = make_node (BLOCK);
6224   bind = gimple_build_bind (NULL, NULL, block);
6225   gsi_replace (gsi_p, bind, true);
6226   gimple_bind_add_stmt (bind, stmt);
6227 
6228   bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
6229   x = build_call_expr_loc (loc, bfn_decl, 0);
6230   x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
6231   x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
6232   tseq = NULL;
6233   gimplify_and_add (x, &tseq);
6234   gimple_bind_add_seq (bind, tseq);
6235 
6236   lower_omp (gimple_omp_body_ptr (stmt), ctx);
6237   gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6238   gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6239   gimple_omp_set_body (stmt, NULL);
6240 
6241   gimple_bind_add_stmt (bind, gimple_build_label (lab));
6242 
6243   gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6244 
6245   pop_gimplify_context (bind);
6246 
6247   gimple_bind_append_vars (bind, ctx->block_vars);
6248   BLOCK_VARS (block) = ctx->block_vars;
6249 }
6250 
6251 
6252 /* Expand code for an OpenMP taskgroup directive.  */
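/* A sketch of the expansion built below:

     GOMP_taskgroup_start ();
     BODY;
     GIMPLE_OMP_RETURN (nowait)

   The matching GOMP_taskgroup_end call is not built here; the
   gimplifier has already wrapped BODY in a try/finally that makes
   that call.  */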
6253 
6254 static void
6255 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6256 {
6257   gimple *stmt = gsi_stmt (*gsi_p);
6258   gcall *x;
6259   gbind *bind;
6260   tree block = make_node (BLOCK);
6261 
6262   bind = gimple_build_bind (NULL, NULL, block);
6263   gsi_replace (gsi_p, bind, true);
6264   gimple_bind_add_stmt (bind, stmt);
6265 
6266   x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
6267 			 0);
6268   gimple_bind_add_stmt (bind, x);
6269 
6270   lower_omp (gimple_omp_body_ptr (stmt), ctx);
6271   gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6272   gimple_omp_set_body (stmt, NULL);
6273 
6274   gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6275 
6276   gimple_bind_append_vars (bind, ctx->block_vars);
6277   BLOCK_VARS (block) = ctx->block_vars;
6278 }
6279 
6280 
6281 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible.  */
6282 
6283 static void
6284 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
6285 			   omp_context *ctx)
6286 {
6287   struct omp_for_data fd;
6288   if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
6289     return;
6290 
6291   unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
6292   struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
6293   omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
6294   if (!fd.ordered)
6295     return;
6296 
6297   tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6298   tree c = gimple_omp_ordered_clauses (ord_stmt);
6299   if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
6300       && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
6301     {
6302       /* Merge depend clauses from multiple adjacent
6303 	 #pragma omp ordered depend(sink:...) constructs
6304 	 into one #pragma omp ordered depend(sink:...), so that
6305 	 we can optimize them together.  */
6306       gimple_stmt_iterator gsi = *gsi_p;
6307       gsi_next (&gsi);
6308       while (!gsi_end_p (gsi))
6309 	{
6310 	  gimple *stmt = gsi_stmt (gsi);
6311 	  if (is_gimple_debug (stmt)
6312 	      || gimple_code (stmt) == GIMPLE_NOP)
6313 	    {
6314 	      gsi_next (&gsi);
6315 	      continue;
6316 	    }
6317 	  if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
6318 	    break;
6319 	  gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
6320 	  c = gimple_omp_ordered_clauses (ord_stmt2);
6321 	  if (c == NULL_TREE
6322 	      || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
6323 	      || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6324 	    break;
6325 	  while (*list_p)
6326 	    list_p = &OMP_CLAUSE_CHAIN (*list_p);
6327 	  *list_p = c;
6328 	  gsi_remove (&gsi, true);
6329 	}
6330     }
6331 
6332   /* Canonicalize sink dependence clauses into one folded clause if
6333      possible.
6334 
6335      The basic algorithm is to create a sink vector whose first
6336      element is the GCD of all the first elements, and whose remaining
6337      elements are the minimum of the subsequent columns.
6338 
6339      We ignore dependence vectors whose first element is zero because
6340      such dependencies are known to be executed by the same thread.
6341 
6342      We take into account the direction of the loop, so a minimum
6343      becomes a maximum if the loop is iterating forwards.  We also
6344      ignore sink clauses where the loop direction is unknown, or where
6345      the offsets are clearly invalid because they are not a multiple
6346      of the loop increment.
6347 
6348      For example:
6349 
6350 	#pragma omp for ordered(2)
6351 	for (i=0; i < N; ++i)
6352 	  for (j=0; j < M; ++j)
6353 	    {
6354 	      #pragma omp ordered \
6355 		depend(sink:i-8,j-2) \
6356 		depend(sink:i,j-1) \	// Completely ignored because i+0.
6357 		depend(sink:i-4,j-3) \
6358 		depend(sink:i-6,j-4)
6359 	      #pragma omp ordered depend(source)
6360 	    }
6361 
6362      Folded clause is:
6363 
6364 	depend(sink:-gcd(8,4,6),-min(2,3,4))
6365 	  -or-
6366 	depend(sink:-2,-2)
6367   */
6368 
6369   /* FIXME: Computing GCDs where the first element is zero is
6370      non-trivial in the presence of collapsed loops.  Do this later.  */
6371   if (fd.collapse > 1)
6372     return;
6373 
6374   wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
6375   memset (folded_deps, 0, sizeof (*folded_deps) * (2 * len - 1));
6376   tree folded_dep = NULL_TREE;
6377   /* TRUE if the first dimension's offset is negative.  */
6378   bool neg_offset_p = false;
6379 
6380   list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6381   unsigned int i;
6382   while ((c = *list_p) != NULL)
6383     {
6384       bool remove = false;
6385 
6386       gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
6387       if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6388 	goto next_ordered_clause;
6389 
6390       tree vec;
6391       for (vec = OMP_CLAUSE_DECL (c), i = 0;
6392 	   vec && TREE_CODE (vec) == TREE_LIST;
6393 	   vec = TREE_CHAIN (vec), ++i)
6394 	{
6395 	  gcc_assert (i < len);
6396 
6397 	  /* omp_extract_for_data has canonicalized the condition.  */
6398 	  gcc_assert (fd.loops[i].cond_code == LT_EXPR
6399 		      || fd.loops[i].cond_code == GT_EXPR);
6400 	  bool forward = fd.loops[i].cond_code == LT_EXPR;
6401 	  bool maybe_lexically_later = true;
6402 
6403 	  /* While the committee makes up its mind, bail if we have any
6404 	     non-constant steps.  */
6405 	  if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
6406 	    goto lower_omp_ordered_ret;
6407 
6408 	  tree itype = TREE_TYPE (TREE_VALUE (vec));
6409 	  if (POINTER_TYPE_P (itype))
6410 	    itype = sizetype;
6411 	  wide_int offset = wide_int::from (TREE_PURPOSE (vec),
6412 					    TYPE_PRECISION (itype),
6413 					    TYPE_SIGN (itype));
6414 
6415 	  /* Ignore invalid offsets that are not multiples of the step.  */
6416 	  if (!wi::multiple_of_p
6417 	      (wi::abs (offset), wi::abs ((wide_int) fd.loops[i].step),
6418 	       UNSIGNED))
6419 	    {
6420 	      warning_at (OMP_CLAUSE_LOCATION (c), 0,
6421 			  "ignoring sink clause with offset that is not "
6422 			  "a multiple of the loop step");
6423 	      remove = true;
6424 	      goto next_ordered_clause;
6425 	    }
6426 
6427 	  /* Calculate the first dimension.  The first dimension of
6428 	     the folded dependency vector is the GCD of the first
6429 	     elements, while ignoring any first elements whose offset
6430 	     is 0.  */
6431 	  if (i == 0)
6432 	    {
6433 	      /* Ignore dependence vectors whose first dimension is 0.  */
6434 	      if (offset == 0)
6435 		{
6436 		  remove = true;
6437 		  goto next_ordered_clause;
6438 		}
6439 	      else
6440 		{
6441 		  if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
6442 		    {
6443 		      error_at (OMP_CLAUSE_LOCATION (c),
6444 				"first offset must be in opposite direction "
6445 				"of loop iterations");
6446 		      goto lower_omp_ordered_ret;
6447 		    }
6448 		  if (forward)
6449 		    offset = -offset;
6450 		  neg_offset_p = forward;
6451 		  /* Initialize the first time around.  */
6452 		  if (folded_dep == NULL_TREE)
6453 		    {
6454 		      folded_dep = c;
6455 		      folded_deps[0] = offset;
6456 		    }
6457 		  else
6458 		    folded_deps[0] = wi::gcd (folded_deps[0],
6459 					      offset, UNSIGNED);
6460 		}
6461 	    }
6462 	  /* Calculate minimum for the remaining dimensions.  */
6463 	  else
6464 	    {
6465 	      folded_deps[len + i - 1] = offset;
6466 	      if (folded_dep == c)
6467 		folded_deps[i] = offset;
6468 	      else if (maybe_lexically_later
6469 		       && !wi::eq_p (folded_deps[i], offset))
6470 		{
6471 		  if (forward ^ wi::gts_p (folded_deps[i], offset))
6472 		    {
6473 		      unsigned int j;
6474 		      folded_dep = c;
6475 		      for (j = 1; j <= i; j++)
6476 			folded_deps[j] = folded_deps[len + j - 1];
6477 		    }
6478 		  else
6479 		    maybe_lexically_later = false;
6480 		}
6481 	    }
6482 	}
6483       gcc_assert (i == len);
6484 
6485       remove = true;
6486 
6487     next_ordered_clause:
6488       if (remove)
6489 	*list_p = OMP_CLAUSE_CHAIN (c);
6490       else
6491 	list_p = &OMP_CLAUSE_CHAIN (c);
6492     }
6493 
6494   if (folded_dep)
6495     {
6496       if (neg_offset_p)
6497 	folded_deps[0] = -folded_deps[0];
6498 
6499       tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
6500       if (POINTER_TYPE_P (itype))
6501 	itype = sizetype;
6502 
6503       TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
6504 	= wide_int_to_tree (itype, folded_deps[0]);
6505       OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
6506       *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
6507     }
6508 
6509  lower_omp_ordered_ret:
6510 
6511   /* Ordered without clauses is #pragma omp ordered threads, while
6512      we want a nop instead if we remove all clauses.  */
6513   if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
6514     gsi_replace (gsi_p, gimple_build_nop (), true);
6515 }
6516 
6517 
6518 /* Expand code for an OpenMP ordered directive.  */
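/* As a sketch (in the non-SIMD, non-SIMT case), the directive

     #pragma omp ordered
       body;

   is lowered to roughly

     GOMP_ordered_start ();
     body;
     GOMP_ordered_end ();

   where GOMP_ordered_start/end are the libgomp entry points behind
   the BUILT_IN_GOMP_ORDERED_START/END builtins used below.  */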
6519 
6520 static void
6521 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6522 {
6523   tree block;
6524   gimple *stmt = gsi_stmt (*gsi_p), *g;
6525   gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
6526   gcall *x;
6527   gbind *bind;
6528   bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6529 			       OMP_CLAUSE_SIMD);
6530   /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
6531      loop.  */
6532   bool maybe_simt
6533     = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
6534   bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6535 				  OMP_CLAUSE_THREADS);
6536 
6537   if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6538 		       OMP_CLAUSE_DEPEND))
6539     {
6540 	      /* FIXME: This needs to be moved to the expansion to verify various
6541 		 conditions only testable on a cfg with dominators computed, and also
6542 	 all the depend clauses to be merged still might need to be available
6543 	 for the runtime checks.  */
6544       if (0)
6545 	lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
6546       return;
6547     }
6548 
6549   push_gimplify_context ();
6550 
6551   block = make_node (BLOCK);
6552   bind = gimple_build_bind (NULL, NULL, block);
6553   gsi_replace (gsi_p, bind, true);
6554   gimple_bind_add_stmt (bind, stmt);
6555 
6556   if (simd)
6557     {
6558       x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
6559 				      build_int_cst (NULL_TREE, threads));
6560       cfun->has_simduid_loops = true;
6561     }
6562   else
6563     x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
6564 			   0);
6565   gimple_bind_add_stmt (bind, x);
6566 
6567   tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
6568   if (maybe_simt)
6569     {
6570       counter = create_tmp_var (integer_type_node);
6571       g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
6572       gimple_call_set_lhs (g, counter);
6573       gimple_bind_add_stmt (bind, g);
6574 
6575       body = create_artificial_label (UNKNOWN_LOCATION);
6576       test = create_artificial_label (UNKNOWN_LOCATION);
6577       gimple_bind_add_stmt (bind, gimple_build_label (body));
6578 
6579       tree simt_pred = create_tmp_var (integer_type_node);
6580       g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
6581       gimple_call_set_lhs (g, simt_pred);
6582       gimple_bind_add_stmt (bind, g);
6583 
6584       tree t = create_artificial_label (UNKNOWN_LOCATION);
6585       g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
6586       gimple_bind_add_stmt (bind, g);
6587 
6588       gimple_bind_add_stmt (bind, gimple_build_label (t));
6589     }
6590   lower_omp (gimple_omp_body_ptr (stmt), ctx);
6591   gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6592   gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6593   gimple_omp_set_body (stmt, NULL);
6594 
6595   if (maybe_simt)
6596     {
6597       gimple_bind_add_stmt (bind, gimple_build_label (test));
6598       g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
6599       gimple_bind_add_stmt (bind, g);
6600 
6601       tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
6602       tree nonneg = create_tmp_var (integer_type_node);
6603       gimple_seq tseq = NULL;
6604       gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
6605       gimple_bind_add_seq (bind, tseq);
6606 
6607       g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
6608       gimple_call_set_lhs (g, nonneg);
6609       gimple_bind_add_stmt (bind, g);
6610 
6611       tree end = create_artificial_label (UNKNOWN_LOCATION);
6612       g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
6613       gimple_bind_add_stmt (bind, g);
6614 
6615       gimple_bind_add_stmt (bind, gimple_build_label (end));
6616     }
6617   if (simd)
6618     x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
6619 				    build_int_cst (NULL_TREE, threads));
6620   else
6621     x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
6622 			   0);
6623   gimple_bind_add_stmt (bind, x);
6624 
6625   gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6626 
6627   pop_gimplify_context (bind);
6628 
6629   gimple_bind_append_vars (bind, ctx->block_vars);
6630   BLOCK_VARS (block) = gimple_bind_vars (bind);
6631 }
6632 
6633 
6634 /* Gimplify a GIMPLE_OMP_CRITICAL statement.  This is a relatively simple
6635    substitution of a couple of function calls.  But in the NAMED case,
6636    it requires that languages coordinate a symbol name.  It is therefore
6637    best put here in common code.  */
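/* As a sketch, '#pragma omp critical (lock1)' around STMT is lowered to
   roughly

     GOMP_critical_name_start (&.gomp_critical_user_lock1);
     STMT;
     GOMP_critical_name_end (&.gomp_critical_user_lock1);

   with the '.gomp_critical_user_lock1' mutex symbol created on demand
   below; the unnamed form uses GOMP_critical_start/end instead.  */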
6638 
6639 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
6640 
6641 static void
6642 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6643 {
6644   tree block;
6645   tree name, lock, unlock;
6646   gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
6647   gbind *bind;
6648   location_t loc = gimple_location (stmt);
6649   gimple_seq tbody;
6650 
6651   name = gimple_omp_critical_name (stmt);
6652   if (name)
6653     {
6654       tree decl;
6655 
6656       if (!critical_name_mutexes)
6657 	critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
6658 
6659       tree *n = critical_name_mutexes->get (name);
6660       if (n == NULL)
6661 	{
6662 	  char *new_str;
6663 
6664 	  decl = create_tmp_var_raw (ptr_type_node);
6665 
6666 	  new_str = ACONCAT ((".gomp_critical_user_",
6667 			      IDENTIFIER_POINTER (name), NULL));
6668 	  DECL_NAME (decl) = get_identifier (new_str);
6669 	  TREE_PUBLIC (decl) = 1;
6670 	  TREE_STATIC (decl) = 1;
6671 	  DECL_COMMON (decl) = 1;
6672 	  DECL_ARTIFICIAL (decl) = 1;
6673 	  DECL_IGNORED_P (decl) = 1;
6674 
6675 	  varpool_node::finalize_decl (decl);
6676 
6677 	  critical_name_mutexes->put (name, decl);
6678 	}
6679       else
6680 	decl = *n;
6681 
6682       /* If '#pragma omp critical' is inside an offloaded region or
6683 	 inside a function marked as offloadable, the symbol must be
6684 	 marked as offloadable too.  */
6685       omp_context *octx;
6686       if (cgraph_node::get (current_function_decl)->offloadable)
6687 	varpool_node::get_create (decl)->offloadable = 1;
6688       else
6689 	for (octx = ctx->outer; octx; octx = octx->outer)
6690 	  if (is_gimple_omp_offloaded (octx->stmt))
6691 	    {
6692 	      varpool_node::get_create (decl)->offloadable = 1;
6693 	      break;
6694 	    }
6695 
6696       lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
6697       lock = build_call_expr_loc (loc, lock, 1,
6698 				  build_fold_addr_expr_loc (loc, decl));
6699 
6700       unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
6701       unlock = build_call_expr_loc (loc, unlock, 1,
6702 				build_fold_addr_expr_loc (loc, decl));
6703     }
6704   else
6705     {
6706       lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
6707       lock = build_call_expr_loc (loc, lock, 0);
6708 
6709       unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
6710       unlock = build_call_expr_loc (loc, unlock, 0);
6711     }
6712 
6713   push_gimplify_context ();
6714 
6715   block = make_node (BLOCK);
6716   bind = gimple_build_bind (NULL, NULL, block);
6717   gsi_replace (gsi_p, bind, true);
6718   gimple_bind_add_stmt (bind, stmt);
6719 
6720   tbody = gimple_bind_body (bind);
6721   gimplify_and_add (lock, &tbody);
6722   gimple_bind_set_body (bind, tbody);
6723 
6724   lower_omp (gimple_omp_body_ptr (stmt), ctx);
6725   gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6726   gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6727   gimple_omp_set_body (stmt, NULL);
6728 
6729   tbody = gimple_bind_body (bind);
6730   gimplify_and_add (unlock, &tbody);
6731   gimple_bind_set_body (bind, tbody);
6732 
6733   gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6734 
6735   pop_gimplify_context (bind);
6736   gimple_bind_append_vars (bind, ctx->block_vars);
6737   BLOCK_VARS (block) = gimple_bind_vars (bind);
6738 }
6739 
6740 /* A subroutine of lower_omp_for.  Generate code to emit the predicate
6741    for a lastprivate clause.  Given a loop control predicate of (V
6742    cond N2), we gate the clause on (!(V cond N2)).  The lowered form
6743    is appended to *DLIST; the iterator initialization is appended to
6744    *BODY_P.  */
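/* E.g., for 'for (V = 0; V < N2; V++)' the lastprivate assignments are
   gated by 'if (V >= N2)'; when the step is +/-1 this is strengthened
   below to 'if (V == N2)' so value-range propagation can deduce V.  */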
6745 
6746 static void
6747 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
6748 			   gimple_seq *dlist, struct omp_context *ctx)
6749 {
6750   tree clauses, cond, vinit;
6751   enum tree_code cond_code;
6752   gimple_seq stmts;
6753 
6754   cond_code = fd->loop.cond_code;
6755   cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
6756 
6757   /* When possible, use a strict equality expression.  This can let
6758      VRP-type optimizations deduce the value and remove a copy.  */
6759   if (tree_fits_shwi_p (fd->loop.step))
6760     {
6761       HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
6762       if (step == 1 || step == -1)
6763 	cond_code = EQ_EXPR;
6764     }
6765 
6766   if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
6767       || gimple_omp_for_grid_phony (fd->for_stmt))
6768     cond = omp_grid_lastprivate_predicate (fd);
6769   else
6770     {
6771       tree n2 = fd->loop.n2;
6772       if (fd->collapse > 1
6773 	  && TREE_CODE (n2) != INTEGER_CST
6774 	  && gimple_omp_for_combined_into_p (fd->for_stmt))
6775 	{
6776 	  struct omp_context *taskreg_ctx = NULL;
6777 	  if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
6778 	    {
6779 	      gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
6780 	      if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
6781 		  || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
6782 		{
6783 		  if (gimple_omp_for_combined_into_p (gfor))
6784 		    {
6785 		      gcc_assert (ctx->outer->outer
6786 				  && is_parallel_ctx (ctx->outer->outer));
6787 		      taskreg_ctx = ctx->outer->outer;
6788 		    }
6789 		  else
6790 		    {
6791 		      struct omp_for_data outer_fd;
6792 		      omp_extract_for_data (gfor, &outer_fd, NULL);
6793 		      n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
6794 		    }
6795 		}
6796 	      else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
6797 		taskreg_ctx = ctx->outer->outer;
6798 	    }
6799 	  else if (is_taskreg_ctx (ctx->outer))
6800 	    taskreg_ctx = ctx->outer;
6801 	  if (taskreg_ctx)
6802 	    {
6803 	      int i;
6804 	      tree taskreg_clauses
6805 		= gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
6806 	      tree innerc = omp_find_clause (taskreg_clauses,
6807 					     OMP_CLAUSE__LOOPTEMP_);
6808 	      gcc_assert (innerc);
6809 	      for (i = 0; i < fd->collapse; i++)
6810 		{
6811 		  innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6812 					    OMP_CLAUSE__LOOPTEMP_);
6813 		  gcc_assert (innerc);
6814 		}
6815 	      innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6816 					OMP_CLAUSE__LOOPTEMP_);
6817 	      if (innerc)
6818 		n2 = fold_convert (TREE_TYPE (n2),
6819 				   lookup_decl (OMP_CLAUSE_DECL (innerc),
6820 						taskreg_ctx));
6821 	    }
6822 	}
6823       cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
6824     }
6825 
6826   clauses = gimple_omp_for_clauses (fd->for_stmt);
6827   stmts = NULL;
6828   lower_lastprivate_clauses (clauses, cond, &stmts, ctx);
6829   if (!gimple_seq_empty_p (stmts))
6830     {
6831       gimple_seq_add_seq (&stmts, *dlist);
6832       *dlist = stmts;
6833 
6834       /* Optimize: v = 0; is usually cheaper than v = some_other_constant.  */
6835       vinit = fd->loop.n1;
6836       if (cond_code == EQ_EXPR
6837 	  && tree_fits_shwi_p (fd->loop.n2)
6838 	  && ! integer_zerop (fd->loop.n2))
6839 	vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
6840       else
6841 	vinit = unshare_expr (vinit);
6842 
6843       /* Initialize the iterator variable, so that threads that don't execute
6844 	 any iterations don't execute the lastprivate clauses by accident.  */
6845       gimplify_assign (fd->loop.v, vinit, body_p);
6846     }
6847 }
6848 
6849 
6850 /* Lower code for an OMP loop directive.  */
6851 
6852 static void
6853 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6854 {
6855   tree *rhs_p, block;
6856   struct omp_for_data fd, *fdp = NULL;
6857   gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
6858   gbind *new_stmt;
6859   gimple_seq omp_for_body, body, dlist;
6860   gimple_seq oacc_head = NULL, oacc_tail = NULL;
6861   size_t i;
6862 
6863   push_gimplify_context ();
6864 
6865   lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
6866 
6867   block = make_node (BLOCK);
6868   new_stmt = gimple_build_bind (NULL, NULL, block);
6869   /* Replace at gsi right away, so that 'stmt' is no longer a member
6870      of a sequence as we're going to add it to a different one
6871      below.  */
6872   gsi_replace (gsi_p, new_stmt, true);
6873 
6874   /* Move declarations of temporaries in the loop body before we make
6875      it go away.  */
6876   omp_for_body = gimple_omp_body (stmt);
6877   if (!gimple_seq_empty_p (omp_for_body)
6878       && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
6879     {
6880       gbind *inner_bind
6881 	= as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
6882       tree vars = gimple_bind_vars (inner_bind);
6883       gimple_bind_append_vars (new_stmt, vars);
6884       /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
6885 	 keep them on the inner_bind and its block.  */
6886       gimple_bind_set_vars (inner_bind, NULL_TREE);
6887       if (gimple_bind_block (inner_bind))
6888 	BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
6889     }
6890 
6891   if (gimple_omp_for_combined_into_p (stmt))
6892     {
6893       omp_extract_for_data (stmt, &fd, NULL);
6894       fdp = &fd;
6895 
6896       /* We need two temporaries with fd.loop.v type (istart/iend)
6897 	 and then (fd.collapse - 1) temporaries with the same
6898 	 type for count2 ... countN-1 vars if not constant.  */
6899       size_t count = 2;
6900       tree type = fd.iter_type;
6901       if (fd.collapse > 1
6902 	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
6903 	count += fd.collapse - 1;
6904       bool taskreg_for
6905 	= (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
6906 	   || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
6907       tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
6908       tree simtc = NULL;
6909       tree clauses = *pc;
6910       if (taskreg_for)
6911 	outerc
6912 	  = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
6913 			     OMP_CLAUSE__LOOPTEMP_);
6914       if (ctx->simt_stmt)
6915 	simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
6916 				 OMP_CLAUSE__LOOPTEMP_);
6917       for (i = 0; i < count; i++)
6918 	{
6919 	  tree temp;
6920 	  if (taskreg_for)
6921 	    {
6922 	      gcc_assert (outerc);
6923 	      temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
6924 	      outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
6925 					OMP_CLAUSE__LOOPTEMP_);
6926 	    }
6927 	  else
6928 	    {
6929 	      /* If there are 2 adjacent SIMD stmts, one with _simt_
6930 		 clause, another without, make sure they have the same
6931 		 decls in _looptemp_ clauses, because the outer stmt
6932 		 they are combined into will look up just one inner_stmt.  */
6933 	      if (ctx->simt_stmt)
6934 		temp = OMP_CLAUSE_DECL (simtc);
6935 	      else
6936 		temp = create_tmp_var (type);
6937 	      insert_decl_map (&ctx->outer->cb, temp, temp);
6938 	    }
6939 	  *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
6940 	  OMP_CLAUSE_DECL (*pc) = temp;
6941 	  pc = &OMP_CLAUSE_CHAIN (*pc);
6942 	  if (ctx->simt_stmt)
6943 	    simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
6944 				     OMP_CLAUSE__LOOPTEMP_);
6945 	}
6946       *pc = clauses;
6947     }
6948 
6949   /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR.  */
6950   dlist = NULL;
6951   body = NULL;
6952   lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
6953 			   fdp);
6954   gimple_seq_add_seq (&body, gimple_omp_for_pre_body (stmt));
6955 
6956   lower_omp (gimple_omp_body_ptr (stmt), ctx);
6957 
6958   /* Lower the header expressions.  At this point, we can assume that
6959      the header is of the form:
6960 
6961      	#pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
6962 
6963      We just need to make sure that VAL1, VAL2 and VAL3 are lowered
6964      using the .omp_data_s mapping, if needed.  */
6965   for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
6966     {
6967       rhs_p = gimple_omp_for_initial_ptr (stmt, i);
6968       if (!is_gimple_min_invariant (*rhs_p))
6969 	*rhs_p = get_formal_tmp_var (*rhs_p, &body);
6970       else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
6971 	recompute_tree_invariant_for_addr_expr (*rhs_p);
6972 
6973       rhs_p = gimple_omp_for_final_ptr (stmt, i);
6974       if (!is_gimple_min_invariant (*rhs_p))
6975 	*rhs_p = get_formal_tmp_var (*rhs_p, &body);
6976       else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
6977 	recompute_tree_invariant_for_addr_expr (*rhs_p);
6978 
6979       rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
6980       if (!is_gimple_min_invariant (*rhs_p))
6981 	*rhs_p = get_formal_tmp_var (*rhs_p, &body);
6982     }
6983 
6984   /* Once lowered, extract the bounds and clauses.  */
6985   omp_extract_for_data (stmt, &fd, NULL);
6986 
6987   if (is_gimple_omp_oacc (ctx->stmt)
6988       && !ctx_in_oacc_kernels_region (ctx))
6989     lower_oacc_head_tail (gimple_location (stmt),
6990 			  gimple_omp_for_clauses (stmt),
6991 			  &oacc_head, &oacc_tail, ctx);
6992 
6993   /* Add OpenACC partitioning and reduction markers just before the loop.  */
6994   if (oacc_head)
6995     gimple_seq_add_seq (&body, oacc_head);
6996 
6997   lower_omp_for_lastprivate (&fd, &body, &dlist, ctx);
6998 
6999   if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
7000     for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
7001       if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7002 	  && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
7003 	{
7004 	  OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
7005 	  if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
7006 	    OMP_CLAUSE_LINEAR_STEP (c)
7007 	      = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
7008 						ctx);
7009 	}
7010 
7011   bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
7012 		     && gimple_omp_for_grid_phony (stmt));
7013   if (!phony_loop)
7014     gimple_seq_add_stmt (&body, stmt);
7015   gimple_seq_add_seq (&body, gimple_omp_body (stmt));
7016 
7017   if (!phony_loop)
7018     gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
7019 							   fd.loop.v));
7020 
7021   /* After the loop, add exit clauses.  */
7022   lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, ctx);
7023 
7024   if (ctx->cancellable)
7025     gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
7026 
7027   gimple_seq_add_seq (&body, dlist);
7028 
7029   body = maybe_catch_exception (body);
7030 
7031   if (!phony_loop)
7032     {
7033       /* Region exit marker goes at the end of the loop body.  */
7034       gimple_seq_add_stmt (&body, gimple_build_omp_return (fd.have_nowait));
7035       maybe_add_implicit_barrier_cancel (ctx, &body);
7036     }
7037 
7038   /* Add OpenACC joining and reduction markers just after the loop.  */
7039   if (oacc_tail)
7040     gimple_seq_add_seq (&body, oacc_tail);
7041 
7042   pop_gimplify_context (new_stmt);
7043 
7044   gimple_bind_append_vars (new_stmt, ctx->block_vars);
7045   maybe_remove_omp_member_access_dummy_vars (new_stmt);
7046   BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
7047   if (BLOCK_VARS (block))
7048     TREE_USED (block) = 1;
7049 
7050   gimple_bind_set_body (new_stmt, body);
7051   gimple_omp_set_body (stmt, NULL);
7052   gimple_omp_for_set_pre_body (stmt, NULL);
7053 }
7054 
7055 /* Callback for walk_stmts.  Check if the current statement only contains
7056    GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS.  */
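/* The caller uses this to recognize e.g.

     #pragma omp parallel
     #pragma omp for
     for (...) ...

   where the parallel body consists of exactly one workshare: *INFO
   ends up as 1 in that case and the parallel is then marked
   combined.  */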
7057 
7058 static tree
7059 check_combined_parallel (gimple_stmt_iterator *gsi_p,
7060     			 bool *handled_ops_p,
7061     			 struct walk_stmt_info *wi)
7062 {
7063   int *info = (int *) wi->info;
7064   gimple *stmt = gsi_stmt (*gsi_p);
7065 
7066   *handled_ops_p = true;
7067   switch (gimple_code (stmt))
7068     {
7069     WALK_SUBSTMTS;
7070 
7071     case GIMPLE_OMP_FOR:
7072     case GIMPLE_OMP_SECTIONS:
7073       *info = *info == 0 ? 1 : -1;
7074       break;
7075     default:
7076       *info = -1;
7077       break;
7078     }
7079   return NULL;
7080 }
7081 
7082 struct omp_taskcopy_context
7083 {
7084   /* This field must be at the beginning, as we do "inheritance": Some
7085      callback functions for tree-inline.c (e.g., omp_copy_decl)
7086      receive a copy_body_data pointer that is up-casted to an
7087      omp_context pointer.  */
7088   copy_body_data cb;
7089   omp_context *ctx;
7090 };
7091 
7092 static tree
7093 task_copyfn_copy_decl (tree var, copy_body_data *cb)
7094 {
7095   struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
7096 
7097   if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
7098     return create_tmp_var (TREE_TYPE (var));
7099 
7100   return var;
7101 }
7102 
7103 static tree
7104 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
7105 {
7106   tree name, new_fields = NULL, type, f;
7107 
7108   type = lang_hooks.types.make_type (RECORD_TYPE);
7109   name = DECL_NAME (TYPE_NAME (orig_type));
7110   name = build_decl (gimple_location (tcctx->ctx->stmt),
7111 		     TYPE_DECL, name, type);
7112   TYPE_NAME (type) = name;
7113 
7114   for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
7115     {
7116       tree new_f = copy_node (f);
7117       DECL_CONTEXT (new_f) = type;
7118       TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
7119       TREE_CHAIN (new_f) = new_fields;
7120       walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7121       walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7122       walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
7123 		 &tcctx->cb, NULL);
7124       new_fields = new_f;
7125       tcctx->cb.decl_map->put (f, new_f);
7126     }
7127   TYPE_FIELDS (type) = nreverse (new_fields);
7128   layout_type (type);
7129   return type;
7130 }
7131 
7132 /* Create task copyfn.  */
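/* The copy function is invoked by the runtime (via the copy-function
   argument of GOMP_task) when the task is deferred rather than run
   immediately: it copy-constructs the firstprivate (and remaining
   _looptemp_) fields from the sender record pointed to by the second
   argument into the task's own data block pointed to by the first.  */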
7133 
7134 static void
7135 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
7136 {
7137   struct function *child_cfun;
7138   tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
7139   tree record_type, srecord_type, bind, list;
7140   bool record_needs_remap = false, srecord_needs_remap = false;
7141   splay_tree_node n;
7142   struct omp_taskcopy_context tcctx;
7143   location_t loc = gimple_location (task_stmt);
7144   size_t looptempno = 0;
7145 
7146   child_fn = gimple_omp_task_copy_fn (task_stmt);
7147   child_cfun = DECL_STRUCT_FUNCTION (child_fn);
7148   gcc_assert (child_cfun->cfg == NULL);
7149   DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
7150 
7151   /* Reset DECL_CONTEXT on function arguments.  */
7152   for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
7153     DECL_CONTEXT (t) = child_fn;
7154 
7155   /* Populate the function.  */
7156   push_gimplify_context ();
7157   push_cfun (child_cfun);
7158 
7159   bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
7160   TREE_SIDE_EFFECTS (bind) = 1;
7161   list = NULL;
7162   DECL_SAVED_TREE (child_fn) = bind;
7163   DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
7164 
7165   /* Remap src and dst argument types if needed.  */
7166   record_type = ctx->record_type;
7167   srecord_type = ctx->srecord_type;
7168   for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
7169     if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7170       {
7171 	record_needs_remap = true;
7172 	break;
7173       }
7174   for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
7175     if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7176       {
7177 	srecord_needs_remap = true;
7178 	break;
7179       }
7180 
7181   if (record_needs_remap || srecord_needs_remap)
7182     {
7183       memset (&tcctx, '\0', sizeof (tcctx));
7184       tcctx.cb.src_fn = ctx->cb.src_fn;
7185       tcctx.cb.dst_fn = child_fn;
7186       tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
7187       gcc_checking_assert (tcctx.cb.src_node);
7188       tcctx.cb.dst_node = tcctx.cb.src_node;
7189       tcctx.cb.src_cfun = ctx->cb.src_cfun;
7190       tcctx.cb.copy_decl = task_copyfn_copy_decl;
7191       tcctx.cb.eh_lp_nr = 0;
7192       tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
7193       tcctx.cb.decl_map = new hash_map<tree, tree>;
7194       tcctx.ctx = ctx;
7195 
7196       if (record_needs_remap)
7197 	record_type = task_copyfn_remap_type (&tcctx, record_type);
7198       if (srecord_needs_remap)
7199 	srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
7200     }
7201   else
7202     tcctx.cb.decl_map = NULL;
7203 
7204   arg = DECL_ARGUMENTS (child_fn);
7205   TREE_TYPE (arg) = build_pointer_type (record_type);
7206   sarg = DECL_CHAIN (arg);
7207   TREE_TYPE (sarg) = build_pointer_type (srecord_type);
7208 
7209   /* First pass: initialize temporaries used in record_type and srecord_type
7210      sizes and field offsets.  */
7211   if (tcctx.cb.decl_map)
7212     for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7213       if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7214 	{
7215 	  tree *p;
7216 
7217 	  decl = OMP_CLAUSE_DECL (c);
7218 	  p = tcctx.cb.decl_map->get (decl);
7219 	  if (p == NULL)
7220 	    continue;
7221 	  n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7222 	  sf = (tree) n->value;
7223 	  sf = *tcctx.cb.decl_map->get (sf);
7224 	  src = build_simple_mem_ref_loc (loc, sarg);
7225 	  src = omp_build_component_ref (src, sf);
7226 	  t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
7227 	  append_to_statement_list (t, &list);
7228 	}
7229 
7230   /* Second pass: copy shared var pointers and copy-construct non-VLA
7231      firstprivate vars.  */
7232   for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7233     switch (OMP_CLAUSE_CODE (c))
7234       {
7235 	splay_tree_key key;
7236       case OMP_CLAUSE_SHARED:
7237 	decl = OMP_CLAUSE_DECL (c);
7238 	key = (splay_tree_key) decl;
7239 	if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7240 	  key = (splay_tree_key) &DECL_UID (decl);
7241 	n = splay_tree_lookup (ctx->field_map, key);
7242 	if (n == NULL)
7243 	  break;
7244 	f = (tree) n->value;
7245 	if (tcctx.cb.decl_map)
7246 	  f = *tcctx.cb.decl_map->get (f);
7247 	n = splay_tree_lookup (ctx->sfield_map, key);
7248 	sf = (tree) n->value;
7249 	if (tcctx.cb.decl_map)
7250 	  sf = *tcctx.cb.decl_map->get (sf);
7251 	src = build_simple_mem_ref_loc (loc, sarg);
7252 	src = omp_build_component_ref (src, sf);
7253 	dst = build_simple_mem_ref_loc (loc, arg);
7254 	dst = omp_build_component_ref (dst, f);
7255 	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7256 	append_to_statement_list (t, &list);
7257 	break;
7258       case OMP_CLAUSE__LOOPTEMP_:
7259 	/* Fields for the first two _looptemp_ clauses are initialized by
7260 	   GOMP_taskloop*, the rest are handled like firstprivate.  */
7261         if (looptempno < 2)
7262 	  {
7263 	    looptempno++;
7264 	    break;
7265 	  }
7266 	/* FALLTHRU */
7267       case OMP_CLAUSE_FIRSTPRIVATE:
7268 	decl = OMP_CLAUSE_DECL (c);
7269 	if (is_variable_sized (decl))
7270 	  break;
7271 	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7272 	if (n == NULL)
7273 	  break;
7274 	f = (tree) n->value;
7275 	if (tcctx.cb.decl_map)
7276 	  f = *tcctx.cb.decl_map->get (f);
7277 	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7278 	if (n != NULL)
7279 	  {
7280 	    sf = (tree) n->value;
7281 	    if (tcctx.cb.decl_map)
7282 	      sf = *tcctx.cb.decl_map->get (sf);
7283 	    src = build_simple_mem_ref_loc (loc, sarg);
7284 	    src = omp_build_component_ref (src, sf);
7285 	    if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
7286 	      src = build_simple_mem_ref_loc (loc, src);
7287 	  }
7288 	else
7289 	  src = decl;
7290 	dst = build_simple_mem_ref_loc (loc, arg);
7291 	dst = omp_build_component_ref (dst, f);
7292 	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__LOOPTEMP_)
7293 	  t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7294 	else
7295 	  t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7296 	append_to_statement_list (t, &list);
7297 	break;
7298       case OMP_CLAUSE_PRIVATE:
7299 	if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7300 	  break;
7301 	decl = OMP_CLAUSE_DECL (c);
7302 	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7303 	f = (tree) n->value;
7304 	if (tcctx.cb.decl_map)
7305 	  f = *tcctx.cb.decl_map->get (f);
7306 	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7307 	if (n != NULL)
7308 	  {
7309 	    sf = (tree) n->value;
7310 	    if (tcctx.cb.decl_map)
7311 	      sf = *tcctx.cb.decl_map->get (sf);
7312 	    src = build_simple_mem_ref_loc (loc, sarg);
7313 	    src = omp_build_component_ref (src, sf);
7314 	    if (use_pointer_for_field (decl, NULL))
7315 	      src = build_simple_mem_ref_loc (loc, src);
7316 	  }
7317 	else
7318 	  src = decl;
7319 	dst = build_simple_mem_ref_loc (loc, arg);
7320 	dst = omp_build_component_ref (dst, f);
7321 	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7322 	append_to_statement_list (t, &list);
7323 	break;
7324       default:
7325 	break;
7326       }
7327 
7328   /* Last pass: handle VLA firstprivates.  */
7329   if (tcctx.cb.decl_map)
7330     for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7331       if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7332 	{
7333 	  tree ind, ptr, df;
7334 
7335 	  decl = OMP_CLAUSE_DECL (c);
7336 	  if (!is_variable_sized (decl))
7337 	    continue;
7338 	  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7339 	  if (n == NULL)
7340 	    continue;
7341 	  f = (tree) n->value;
7342 	  f = *tcctx.cb.decl_map->get (f);
7343 	  gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
7344 	  ind = DECL_VALUE_EXPR (decl);
7345 	  gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
7346 	  gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
7347 	  n = splay_tree_lookup (ctx->sfield_map,
7348 				 (splay_tree_key) TREE_OPERAND (ind, 0));
7349 	  sf = (tree) n->value;
7350 	  sf = *tcctx.cb.decl_map->get (sf);
7351 	  src = build_simple_mem_ref_loc (loc, sarg);
7352 	  src = omp_build_component_ref (src, sf);
7353 	  src = build_simple_mem_ref_loc (loc, src);
7354 	  dst = build_simple_mem_ref_loc (loc, arg);
7355 	  dst = omp_build_component_ref (dst, f);
7356 	  t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7357 	  append_to_statement_list (t, &list);
7358 	  n = splay_tree_lookup (ctx->field_map,
7359 				 (splay_tree_key) TREE_OPERAND (ind, 0));
7360 	  df = (tree) n->value;
7361 	  df = *tcctx.cb.decl_map->get (df);
7362 	  ptr = build_simple_mem_ref_loc (loc, arg);
7363 	  ptr = omp_build_component_ref (ptr, df);
7364 	  t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
7365 		      build_fold_addr_expr_loc (loc, dst));
7366 	  append_to_statement_list (t, &list);
7367 	}
7368 
7369   t = build1 (RETURN_EXPR, void_type_node, NULL);
7370   append_to_statement_list (t, &list);
7371 
7372   if (tcctx.cb.decl_map)
7373     delete tcctx.cb.decl_map;
7374   pop_gimplify_context (NULL);
7375   BIND_EXPR_BODY (bind) = list;
7376   pop_cfun ();
7377 }
7378 
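/* Lower the depend clauses found in *PCLAUSES into a dependence array
   for the runtime: element 0 holds the total number of depend
   addresses, element 1 the number of out/inout ones, followed by the
   out/inout addresses and then the in addresses.  The initialization
   of the array is appended to *ISEQ and its final clobber to *OSEQ.  */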
7379 static void
7380 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
7381 {
7382   tree c, clauses;
7383   gimple *g;
7384   size_t n_in = 0, n_out = 0, idx = 2, i;
7385 
7386   clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
7387   gcc_assert (clauses);
7388   for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7389     if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7390       switch (OMP_CLAUSE_DEPEND_KIND (c))
7391 	{
7392 	case OMP_CLAUSE_DEPEND_IN:
7393 	  n_in++;
7394 	  break;
7395 	case OMP_CLAUSE_DEPEND_OUT:
7396 	case OMP_CLAUSE_DEPEND_INOUT:
7397 	  n_out++;
7398 	  break;
7399 	case OMP_CLAUSE_DEPEND_SOURCE:
7400 	case OMP_CLAUSE_DEPEND_SINK:
7401 	  /* FALLTHRU */
7402 	default:
7403 	  gcc_unreachable ();
7404 	}
7405   tree type = build_array_type_nelts (ptr_type_node, n_in + n_out + 2);
7406   tree array = create_tmp_var (type);
7407   TREE_ADDRESSABLE (array) = 1;
7408   tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
7409 		   NULL_TREE);
7410   g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_in + n_out));
7411   gimple_seq_add_stmt (iseq, g);
7412   r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
7413 	      NULL_TREE);
7414   g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_out));
7415   gimple_seq_add_stmt (iseq, g);
7416   for (i = 0; i < 2; i++)
7417     {
7418       if ((i ? n_in : n_out) == 0)
7419 	continue;
7420       for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7421 	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
7422 	    && ((OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_IN) ^ i))
7423 	  {
7424 	    tree t = OMP_CLAUSE_DECL (c);
7425 	    t = fold_convert (ptr_type_node, t);
7426 	    gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
7427 	    r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
7428 			NULL_TREE, NULL_TREE);
7429 	    g = gimple_build_assign (r, t);
7430 	    gimple_seq_add_stmt (iseq, g);
7431 	  }
7432     }
7433   c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
7434   OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
7435   OMP_CLAUSE_CHAIN (c) = *pclauses;
7436   *pclauses = c;
7437   tree clobber = build_constructor (type, NULL);
7438   TREE_THIS_VOLATILE (clobber) = 1;
7439   g = gimple_build_assign (array, clobber);
7440   gimple_seq_add_stmt (oseq, g);
7441 }
7442 
7443 /* Lower the OpenMP parallel or task directive in the current statement
7444    in GSI_P.  CTX holds context information for the directive.  */
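/* In outline, the lowered form is a bind that first evaluates the send
   clauses into the .omp_data_o record (allocated below as
   ctx->sender_decl), then contains the directive itself, whose new body
   begins by loading the incoming data pointer into ctx->receiver_decl
   before the privatized body runs.  */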
7445 
7446 static void
7447 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7448 {
7449   tree clauses;
7450   tree child_fn, t;
7451   gimple *stmt = gsi_stmt (*gsi_p);
7452   gbind *par_bind, *bind, *dep_bind = NULL;
7453   gimple_seq par_body, olist, ilist, par_olist, par_rlist, par_ilist, new_body;
7454   location_t loc = gimple_location (stmt);
7455 
7456   clauses = gimple_omp_taskreg_clauses (stmt);
7457   par_bind
7458     = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
7459   par_body = gimple_bind_body (par_bind);
7460   child_fn = ctx->cb.dst_fn;
7461   if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7462       && !gimple_omp_parallel_combined_p (stmt))
7463     {
7464       struct walk_stmt_info wi;
7465       int ws_num = 0;
7466 
7467       memset (&wi, 0, sizeof (wi));
7468       wi.info = &ws_num;
7469       wi.val_only = true;
7470       walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
7471       if (ws_num == 1)
7472 	gimple_omp_parallel_set_combined_p (stmt, true);
7473     }
7474   gimple_seq dep_ilist = NULL;
7475   gimple_seq dep_olist = NULL;
7476   if (gimple_code (stmt) == GIMPLE_OMP_TASK
7477       && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
7478     {
7479       push_gimplify_context ();
7480       dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7481       lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
7482 			    &dep_ilist, &dep_olist);
7483     }
7484 
7485   if (ctx->srecord_type)
7486     create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
7487 
7488   push_gimplify_context ();
7489 
7490   par_olist = NULL;
7491   par_ilist = NULL;
7492   par_rlist = NULL;
7493   bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7494     && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
7495   if (phony_construct && ctx->record_type)
7496     {
7497       gcc_checking_assert (!ctx->receiver_decl);
7498       ctx->receiver_decl = create_tmp_var
7499 	(build_reference_type (ctx->record_type), ".omp_rec");
7500     }
7501   lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
7502   lower_omp (&par_body, ctx);
7503   if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
7504     lower_reduction_clauses (clauses, &par_rlist, ctx);
7505 
7506   /* Declare all the variables created by mapping and the variables
7507      declared in the scope of the parallel body.  */
7508   record_vars_into (ctx->block_vars, child_fn);
7509   maybe_remove_omp_member_access_dummy_vars (par_bind);
7510   record_vars_into (gimple_bind_vars (par_bind), child_fn);
7511 
7512   if (ctx->record_type)
7513     {
7514       ctx->sender_decl
7515 	= create_tmp_var (ctx->srecord_type ? ctx->srecord_type
7516 			  : ctx->record_type, ".omp_data_o");
7517       DECL_NAMELESS (ctx->sender_decl) = 1;
7518       TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7519       gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
7520     }
7521 
7522   olist = NULL;
7523   ilist = NULL;
7524   lower_send_clauses (clauses, &ilist, &olist, ctx);
7525   lower_send_shared_vars (&ilist, &olist, ctx);
7526 
7527   if (ctx->record_type)
7528     {
7529       tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
7530       TREE_THIS_VOLATILE (clobber) = 1;
7531       gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
7532 							clobber));
7533     }
7534 
7535   /* Once all the expansions are done, sequence all the different
7536      fragments inside gimple_omp_body.  */
7537 
7538   new_body = NULL;
7539 
7540   if (ctx->record_type)
7541     {
7542       t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
7543       /* fixup_child_record_type might have changed receiver_decl's type.  */
7544       t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
7545       gimple_seq_add_stmt (&new_body,
7546 	  		   gimple_build_assign (ctx->receiver_decl, t));
7547     }
7548 
7549   gimple_seq_add_seq (&new_body, par_ilist);
7550   gimple_seq_add_seq (&new_body, par_body);
7551   gimple_seq_add_seq (&new_body, par_rlist);
7552   if (ctx->cancellable)
7553     gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
7554   gimple_seq_add_seq (&new_body, par_olist);
7555   new_body = maybe_catch_exception (new_body);
7556   if (gimple_code (stmt) == GIMPLE_OMP_TASK)
7557     gimple_seq_add_stmt (&new_body,
7558 			 gimple_build_omp_continue (integer_zero_node,
7559 						    integer_zero_node));
7560   if (!phony_construct)
7561     {
7562       gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
7563       gimple_omp_set_body (stmt, new_body);
7564     }
7565 
7566   bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
7567   gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
7568   gimple_bind_add_seq (bind, ilist);
7569   if (!phony_construct)
7570     gimple_bind_add_stmt (bind, stmt);
7571   else
7572     gimple_bind_add_seq (bind, new_body);
7573   gimple_bind_add_seq (bind, olist);
7574 
7575   pop_gimplify_context (NULL);
7576 
7577   if (dep_bind)
7578     {
7579       gimple_bind_add_seq (dep_bind, dep_ilist);
7580       gimple_bind_add_stmt (dep_bind, bind);
7581       gimple_bind_add_seq (dep_bind, dep_olist);
7582       pop_gimplify_context (dep_bind);
7583     }
7584 }
7585 
7586 /* Lower the GIMPLE_OMP_TARGET in the current statement
7587    in GSI_P.  CTX holds context information for the directive.  */
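/* For regions with mapping clauses, the lowering materializes a triple
   of arrays (kept in a TREE_VEC as the statement's data_arg):
   .omp_data_arr with the host addresses, .omp_data_sizes with the
   mapped sizes, and .omp_data_kinds with the GOMP_MAP_* kind and
   alignment of each entry; the expansion later hands these to the
   libgomp target entry points.  */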
7588 
7589 static void
7590 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7591 {
7592   tree clauses;
7593   tree child_fn, t, c;
7594   gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
7595   gbind *tgt_bind, *bind, *dep_bind = NULL;
7596   gimple_seq tgt_body, olist, ilist, fplist, new_body;
7597   location_t loc = gimple_location (stmt);
7598   bool offloaded, data_region;
7599   unsigned int map_cnt = 0;
7600 
7601   offloaded = is_gimple_omp_offloaded (stmt);
7602   switch (gimple_omp_target_kind (stmt))
7603     {
7604     case GF_OMP_TARGET_KIND_REGION:
7605     case GF_OMP_TARGET_KIND_UPDATE:
7606     case GF_OMP_TARGET_KIND_ENTER_DATA:
7607     case GF_OMP_TARGET_KIND_EXIT_DATA:
7608     case GF_OMP_TARGET_KIND_OACC_PARALLEL:
7609     case GF_OMP_TARGET_KIND_OACC_KERNELS:
7610     case GF_OMP_TARGET_KIND_OACC_UPDATE:
7611     case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
7612     case GF_OMP_TARGET_KIND_OACC_DECLARE:
7613       data_region = false;
7614       break;
7615     case GF_OMP_TARGET_KIND_DATA:
7616     case GF_OMP_TARGET_KIND_OACC_DATA:
7617     case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
7618       data_region = true;
7619       break;
7620     default:
7621       gcc_unreachable ();
7622     }
7623 
7624   clauses = gimple_omp_target_clauses (stmt);
7625 
7626   gimple_seq dep_ilist = NULL;
7627   gimple_seq dep_olist = NULL;
7628   if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
7629     {
7630       push_gimplify_context ();
7631       dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7632       lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
7633 			    &dep_ilist, &dep_olist);
7634     }
7635 
7636   tgt_bind = NULL;
7637   tgt_body = NULL;
7638   if (offloaded)
7639     {
7640       tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
7641       tgt_body = gimple_bind_body (tgt_bind);
7642     }
7643   else if (data_region)
7644     tgt_body = gimple_omp_body (stmt);
7645   child_fn = ctx->cb.dst_fn;
7646 
7647   push_gimplify_context ();
7648   fplist = NULL;
7649 
7650   for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7651     switch (OMP_CLAUSE_CODE (c))
7652       {
7653 	tree var, x;
7654 
7655       default:
7656 	break;
7657       case OMP_CLAUSE_MAP:
7658 #if CHECKING_P
7659 	/* First check what we're prepared to handle in the following.  */
7660 	switch (OMP_CLAUSE_MAP_KIND (c))
7661 	  {
7662 	  case GOMP_MAP_ALLOC:
7663 	  case GOMP_MAP_TO:
7664 	  case GOMP_MAP_FROM:
7665 	  case GOMP_MAP_TOFROM:
7666 	  case GOMP_MAP_POINTER:
7667 	  case GOMP_MAP_TO_PSET:
7668 	  case GOMP_MAP_DELETE:
7669 	  case GOMP_MAP_RELEASE:
7670 	  case GOMP_MAP_ALWAYS_TO:
7671 	  case GOMP_MAP_ALWAYS_FROM:
7672 	  case GOMP_MAP_ALWAYS_TOFROM:
7673 	  case GOMP_MAP_FIRSTPRIVATE_POINTER:
7674 	  case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
7675 	  case GOMP_MAP_STRUCT:
7676 	  case GOMP_MAP_ALWAYS_POINTER:
7677 	    break;
7678 	  case GOMP_MAP_FORCE_ALLOC:
7679 	  case GOMP_MAP_FORCE_TO:
7680 	  case GOMP_MAP_FORCE_FROM:
7681 	  case GOMP_MAP_FORCE_TOFROM:
7682 	  case GOMP_MAP_FORCE_PRESENT:
7683 	  case GOMP_MAP_FORCE_DEVICEPTR:
7684 	  case GOMP_MAP_DEVICE_RESIDENT:
7685 	  case GOMP_MAP_LINK:
7686 	    gcc_assert (is_gimple_omp_oacc (stmt));
7687 	    break;
7688 	  default:
7689 	    gcc_unreachable ();
7690 	  }
7691 #endif
7692 	  /* FALLTHRU */
7693       case OMP_CLAUSE_TO:
7694       case OMP_CLAUSE_FROM:
7695       oacc_firstprivate:
7696 	var = OMP_CLAUSE_DECL (c);
7697 	if (!DECL_P (var))
7698 	  {
7699 	    if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
7700 		|| (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7701 		    && (OMP_CLAUSE_MAP_KIND (c)
7702 			!= GOMP_MAP_FIRSTPRIVATE_POINTER)))
7703 	      map_cnt++;
7704 	    continue;
7705 	  }
7706 
7707 	if (DECL_SIZE (var)
7708 	    && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
7709 	  {
7710 	    tree var2 = DECL_VALUE_EXPR (var);
7711 	    gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
7712 	    var2 = TREE_OPERAND (var2, 0);
7713 	    gcc_assert (DECL_P (var2));
7714 	    var = var2;
7715 	  }
7716 
7717 	if (offloaded
7718 	    && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7719 	    && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7720 		|| OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7721 	  {
7722 	    if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7723 	      {
7724 		if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
7725 		    && varpool_node::get_create (var)->offloadable)
7726 		  continue;
7727 
7728 		tree type = build_pointer_type (TREE_TYPE (var));
7729 		tree new_var = lookup_decl (var, ctx);
7730 		x = create_tmp_var_raw (type, get_name (new_var));
7731 		gimple_add_tmp_var (x);
7732 		x = build_simple_mem_ref (x);
7733 		SET_DECL_VALUE_EXPR (new_var, x);
7734 		DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7735 	      }
7736 	    continue;
7737 	  }
7738 
7739 	if (!maybe_lookup_field (var, ctx))
7740 	  continue;
7741 
7742 	/* Don't remap oacc parallel reduction variables, because the
7743 	   intermediate result must be local to each gang.  */
7744 	if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7745 			   && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
7746 	  {
7747 	    x = build_receiver_ref (var, true, ctx);
7748 	    tree new_var = lookup_decl (var, ctx);
7749 
7750 	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7751 		&& OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7752 		&& !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7753 		&& TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7754 	      x = build_simple_mem_ref (x);
7755 	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7756 	      {
7757 		gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7758 		if (omp_is_reference (new_var))
7759 		  {
7760 		    /* Create a local object to hold the instance
7761 		       value.  */
7762 		    tree type = TREE_TYPE (TREE_TYPE (new_var));
7763 		    const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
7764 		    tree inst = create_tmp_var (type, id);
7765 		    gimplify_assign (inst, fold_indirect_ref (x), &fplist);
7766 		    x = build_fold_addr_expr (inst);
7767 		  }
7768 		gimplify_assign (new_var, x, &fplist);
7769 	      }
7770 	    else if (DECL_P (new_var))
7771 	      {
7772 		SET_DECL_VALUE_EXPR (new_var, x);
7773 		DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7774 	      }
7775 	    else
7776 	      gcc_unreachable ();
7777 	  }
7778 	map_cnt++;
7779 	break;
7780 
7781       case OMP_CLAUSE_FIRSTPRIVATE:
7782 	if (is_oacc_parallel (ctx))
7783 	  goto oacc_firstprivate;
7784 	map_cnt++;
7785 	var = OMP_CLAUSE_DECL (c);
7786 	if (!omp_is_reference (var)
7787 	    && !is_gimple_reg_type (TREE_TYPE (var)))
7788 	  {
7789 	    tree new_var = lookup_decl (var, ctx);
7790 	    if (is_variable_sized (var))
7791 	      {
7792 		tree pvar = DECL_VALUE_EXPR (var);
7793 		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7794 		pvar = TREE_OPERAND (pvar, 0);
7795 		gcc_assert (DECL_P (pvar));
7796 		tree new_pvar = lookup_decl (pvar, ctx);
7797 		x = build_fold_indirect_ref (new_pvar);
7798 		TREE_THIS_NOTRAP (x) = 1;
7799 	      }
7800 	    else
7801 	      x = build_receiver_ref (var, true, ctx);
7802 	    SET_DECL_VALUE_EXPR (new_var, x);
7803 	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7804 	  }
7805 	break;
7806 
7807       case OMP_CLAUSE_PRIVATE:
7808 	if (is_gimple_omp_oacc (ctx->stmt))
7809 	  break;
7810 	var = OMP_CLAUSE_DECL (c);
7811 	if (is_variable_sized (var))
7812 	  {
7813 	    tree new_var = lookup_decl (var, ctx);
7814 	    tree pvar = DECL_VALUE_EXPR (var);
7815 	    gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7816 	    pvar = TREE_OPERAND (pvar, 0);
7817 	    gcc_assert (DECL_P (pvar));
7818 	    tree new_pvar = lookup_decl (pvar, ctx);
7819 	    x = build_fold_indirect_ref (new_pvar);
7820 	    TREE_THIS_NOTRAP (x) = 1;
7821 	    SET_DECL_VALUE_EXPR (new_var, x);
7822 	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7823 	  }
7824 	break;
7825 
7826       case OMP_CLAUSE_USE_DEVICE_PTR:
7827       case OMP_CLAUSE_IS_DEVICE_PTR:
7828 	var = OMP_CLAUSE_DECL (c);
7829 	map_cnt++;
7830 	if (is_variable_sized (var))
7831 	  {
7832 	    tree new_var = lookup_decl (var, ctx);
7833 	    tree pvar = DECL_VALUE_EXPR (var);
7834 	    gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7835 	    pvar = TREE_OPERAND (pvar, 0);
7836 	    gcc_assert (DECL_P (pvar));
7837 	    tree new_pvar = lookup_decl (pvar, ctx);
7838 	    x = build_fold_indirect_ref (new_pvar);
7839 	    TREE_THIS_NOTRAP (x) = 1;
7840 	    SET_DECL_VALUE_EXPR (new_var, x);
7841 	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7842 	  }
7843 	else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7844 	  {
7845 	    tree new_var = lookup_decl (var, ctx);
7846 	    tree type = build_pointer_type (TREE_TYPE (var));
7847 	    x = create_tmp_var_raw (type, get_name (new_var));
7848 	    gimple_add_tmp_var (x);
7849 	    x = build_simple_mem_ref (x);
7850 	    SET_DECL_VALUE_EXPR (new_var, x);
7851 	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7852 	  }
7853 	else
7854 	  {
7855 	    tree new_var = lookup_decl (var, ctx);
7856 	    x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
7857 	    gimple_add_tmp_var (x);
7858 	    SET_DECL_VALUE_EXPR (new_var, x);
7859 	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7860 	  }
7861 	break;
7862       }
7863 
7864   if (offloaded)
7865     {
7866       target_nesting_level++;
7867       lower_omp (&tgt_body, ctx);
7868       target_nesting_level--;
7869     }
7870   else if (data_region)
7871     lower_omp (&tgt_body, ctx);
7872 
7873   if (offloaded)
7874     {
7875       /* Declare all the variables created by mapping and the variables
7876 	 declared in the scope of the target body.  */
7877       record_vars_into (ctx->block_vars, child_fn);
7878       maybe_remove_omp_member_access_dummy_vars (tgt_bind);
7879       record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
7880     }
7881 
7882   olist = NULL;
7883   ilist = NULL;
7884   if (ctx->record_type)
7885     {
7886       ctx->sender_decl
7887 	= create_tmp_var (ctx->record_type, ".omp_data_arr");
7888       DECL_NAMELESS (ctx->sender_decl) = 1;
7889       TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7890       t = make_tree_vec (3);
7891       TREE_VEC_ELT (t, 0) = ctx->sender_decl;
7892       TREE_VEC_ELT (t, 1)
7893 	= create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
7894 			  ".omp_data_sizes");
7895       DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
7896       TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
7897       TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
7898       tree tkind_type = short_unsigned_type_node;
7899       int talign_shift = 8;
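      /* Each .omp_data_kinds element encodes the GOMP_MAP_* kind in
	 the low TALIGN_SHIFT bits and ceil_log2 of the alignment in
	 the bits above that; the assertions below check both fit.  */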
7900       TREE_VEC_ELT (t, 2)
7901 	= create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
7902 			  ".omp_data_kinds");
7903       DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
7904       TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
7905       TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
7906       gimple_omp_target_set_data_arg (stmt, t);
7907 
7908       vec<constructor_elt, va_gc> *vsize;
7909       vec<constructor_elt, va_gc> *vkind;
7910       vec_alloc (vsize, map_cnt);
7911       vec_alloc (vkind, map_cnt);
7912       unsigned int map_idx = 0;
7913 
7914       for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7915 	switch (OMP_CLAUSE_CODE (c))
7916 	  {
7917 	    tree ovar, nc, s, purpose, var, x, type;
7918 	    unsigned int talign;
7919 
7920 	  default:
7921 	    break;
7922 
7923 	  case OMP_CLAUSE_MAP:
7924 	  case OMP_CLAUSE_TO:
7925 	  case OMP_CLAUSE_FROM:
7926 	  oacc_firstprivate_map:
7927 	    nc = c;
7928 	    ovar = OMP_CLAUSE_DECL (c);
7929 	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7930 		&& (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7931 		    || (OMP_CLAUSE_MAP_KIND (c)
7932 			== GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
7933 	      break;
7934 	    if (!DECL_P (ovar))
7935 	      {
7936 		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7937 		    && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
7938 		  {
7939 		    gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
7940 					 == get_base_address (ovar));
7941 		    nc = OMP_CLAUSE_CHAIN (c);
7942 		    ovar = OMP_CLAUSE_DECL (nc);
7943 		  }
7944 		else
7945 		  {
7946 		    tree x = build_sender_ref (ovar, ctx);
7947 		    tree v
7948 		      = build_fold_addr_expr_with_type (ovar, ptr_type_node);
7949 		    gimplify_assign (x, v, &ilist);
7950 		    nc = NULL_TREE;
7951 		  }
7952 	      }
7953 	    else
7954 	      {
7955 		if (DECL_SIZE (ovar)
7956 		    && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
7957 		  {
7958 		    tree ovar2 = DECL_VALUE_EXPR (ovar);
7959 		    gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
7960 		    ovar2 = TREE_OPERAND (ovar2, 0);
7961 		    gcc_assert (DECL_P (ovar2));
7962 		    ovar = ovar2;
7963 		  }
7964 		if (!maybe_lookup_field (ovar, ctx))
7965 		  continue;
7966 	      }
7967 
7968 	    talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
7969 	    if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
7970 	      talign = DECL_ALIGN_UNIT (ovar);
7971 	    if (nc)
7972 	      {
7973 		var = lookup_decl_in_outer_ctx (ovar, ctx);
7974 		x = build_sender_ref (ovar, ctx);
7975 
7976 		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7977 		    && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7978 		    && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7979 		    && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
7980 		  {
7981 		    gcc_assert (offloaded);
7982 		    tree avar
7983 		      = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
7984 		    mark_addressable (avar);
7985 		    gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
7986 		    talign = DECL_ALIGN_UNIT (avar);
7987 		    avar = build_fold_addr_expr (avar);
7988 		    gimplify_assign (x, avar, &ilist);
7989 		  }
7990 		else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7991 		  {
7992 		    gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7993 		    if (!omp_is_reference (var))
7994 		      {
7995 			if (is_gimple_reg (var)
7996 			    && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
7997 			  TREE_NO_WARNING (var) = 1;
7998 			var = build_fold_addr_expr (var);
7999 		      }
8000 		    else
8001 		      talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8002 		    gimplify_assign (x, var, &ilist);
8003 		  }
8004 		else if (is_gimple_reg (var))
8005 		  {
8006 		    gcc_assert (offloaded);
8007 		    tree avar = create_tmp_var (TREE_TYPE (var));
8008 		    mark_addressable (avar);
8009 		    enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
8010 		    if (GOMP_MAP_COPY_TO_P (map_kind)
8011 			|| map_kind == GOMP_MAP_POINTER
8012 			|| map_kind == GOMP_MAP_TO_PSET
8013 			|| map_kind == GOMP_MAP_FORCE_DEVICEPTR)
8014 		      {
8015 			/* If we need to initialize a temporary
8016 			   with VAR because it is not addressable, and
8017 			   the variable hasn't been initialized yet, then
8018 			   we'll get a warning for the store to avar.
8019 			   Don't warn in that case; the mapping might
8020 			   be implicit.  */
8021 			TREE_NO_WARNING (var) = 1;
8022 			gimplify_assign (avar, var, &ilist);
8023 		      }
8024 		    avar = build_fold_addr_expr (avar);
8025 		    gimplify_assign (x, avar, &ilist);
8026 		    if ((GOMP_MAP_COPY_FROM_P (map_kind)
8027 			 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
8028 			&& !TYPE_READONLY (TREE_TYPE (var)))
8029 		      {
8030 			x = unshare_expr (x);
8031 			x = build_simple_mem_ref (x);
8032 			gimplify_assign (var, x, &olist);
8033 		      }
8034 		  }
8035 		else
8036 		  {
8037 		    var = build_fold_addr_expr (var);
8038 		    gimplify_assign (x, var, &ilist);
8039 		  }
8040 	      }
8041 	    s = NULL_TREE;
8042 	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
8043 	      {
8044 		gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
8045 		s = TREE_TYPE (ovar);
8046 		if (TREE_CODE (s) == REFERENCE_TYPE)
8047 		  s = TREE_TYPE (s);
8048 		s = TYPE_SIZE_UNIT (s);
8049 	      }
8050 	    else
8051 	      s = OMP_CLAUSE_SIZE (c);
8052 	    if (s == NULL_TREE)
8053 	      s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
8054 	    s = fold_convert (size_type_node, s);
8055 	    purpose = size_int (map_idx++);
8056 	    CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8057 	    if (TREE_CODE (s) != INTEGER_CST)
8058 	      TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
8059 
8060 	    unsigned HOST_WIDE_INT tkind, tkind_zero;
8061 	    switch (OMP_CLAUSE_CODE (c))
8062 	      {
8063 	      case OMP_CLAUSE_MAP:
8064 		tkind = OMP_CLAUSE_MAP_KIND (c);
8065 		tkind_zero = tkind;
8066 		if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
8067 		  switch (tkind)
8068 		    {
8069 		    case GOMP_MAP_ALLOC:
8070 		    case GOMP_MAP_TO:
8071 		    case GOMP_MAP_FROM:
8072 		    case GOMP_MAP_TOFROM:
8073 		    case GOMP_MAP_ALWAYS_TO:
8074 		    case GOMP_MAP_ALWAYS_FROM:
8075 		    case GOMP_MAP_ALWAYS_TOFROM:
8076 		    case GOMP_MAP_RELEASE:
8077 		    case GOMP_MAP_FORCE_TO:
8078 		    case GOMP_MAP_FORCE_FROM:
8079 		    case GOMP_MAP_FORCE_TOFROM:
8080 		    case GOMP_MAP_FORCE_PRESENT:
8081 		      tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
8082 		      break;
8083 		    case GOMP_MAP_DELETE:
8084 		      tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
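		      /* FALLTHRU */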
8085 		    default:
8086 		      break;
8087 		    }
8088 		if (tkind_zero != tkind)
8089 		  {
8090 		    if (integer_zerop (s))
8091 		      tkind = tkind_zero;
8092 		    else if (integer_nonzerop (s))
8093 		      tkind_zero = tkind;
8094 		  }
8095 		break;
8096 	      case OMP_CLAUSE_FIRSTPRIVATE:
8097 		gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
8098 		tkind = GOMP_MAP_TO;
8099 		tkind_zero = tkind;
8100 		break;
8101 	      case OMP_CLAUSE_TO:
8102 		tkind = GOMP_MAP_TO;
8103 		tkind_zero = tkind;
8104 		break;
8105 	      case OMP_CLAUSE_FROM:
8106 		tkind = GOMP_MAP_FROM;
8107 		tkind_zero = tkind;
8108 		break;
8109 	      default:
8110 		gcc_unreachable ();
8111 	      }
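	    /* Each element of the kinds array packs both the map kind
	       and the alignment: the low TALIGN_SHIFT bits hold the
	       kind, the bits above them hold ceil_log2 of the
	       alignment in bytes.  */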
8112 	    gcc_checking_assert (tkind
8113 				 < (HOST_WIDE_INT_C (1U) << talign_shift));
8114 	    gcc_checking_assert (tkind_zero
8115 				 < (HOST_WIDE_INT_C (1U) << talign_shift));
8116 	    talign = ceil_log2 (talign);
8117 	    tkind |= talign << talign_shift;
8118 	    tkind_zero |= talign << talign_shift;
8119 	    gcc_checking_assert (tkind
8120 				 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8121 	    gcc_checking_assert (tkind_zero
8122 				 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
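	    /* If the section length S is not a compile-time constant,
	       select between TKIND and TKIND_ZERO at run time based on
	       whether S is zero.  */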
8123 	    if (tkind == tkind_zero)
8124 	      x = build_int_cstu (tkind_type, tkind);
8125 	    else
8126 	      {
8127 		TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
8128 		x = build3 (COND_EXPR, tkind_type,
8129 			    fold_build2 (EQ_EXPR, boolean_type_node,
8130 					 unshare_expr (s), size_zero_node),
8131 			    build_int_cstu (tkind_type, tkind_zero),
8132 			    build_int_cstu (tkind_type, tkind));
8133 	      }
8134 	    CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
8135 	    if (nc && nc != c)
8136 	      c = nc;
8137 	    break;
8138 
8139 	  case OMP_CLAUSE_FIRSTPRIVATE:
8140 	    if (is_oacc_parallel (ctx))
8141 	      goto oacc_firstprivate_map;
8142 	    ovar = OMP_CLAUSE_DECL (c);
8143 	    if (omp_is_reference (ovar))
8144 	      talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8145 	    else
8146 	      talign = DECL_ALIGN_UNIT (ovar);
8147 	    var = lookup_decl_in_outer_ctx (ovar, ctx);
8148 	    x = build_sender_ref (ovar, ctx);
8149 	    tkind = GOMP_MAP_FIRSTPRIVATE;
8150 	    type = TREE_TYPE (ovar);
8151 	    if (omp_is_reference (ovar))
8152 	      type = TREE_TYPE (type);
8153 	    if ((INTEGRAL_TYPE_P (type)
8154 		 && TYPE_PRECISION (type) <= POINTER_SIZE)
8155 		|| TREE_CODE (type) == POINTER_TYPE)
8156 	      {
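		/* Scalars no wider than a pointer are passed by value:
		   the datum itself travels in the address slot of the
		   mapping (GOMP_MAP_FIRSTPRIVATE_INT), so no memory
		   needs to be mapped for it.  */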
8157 		tkind = GOMP_MAP_FIRSTPRIVATE_INT;
8158 		tree t = var;
8159 		if (omp_is_reference (var))
8160 		  t = build_simple_mem_ref (var);
8161 		else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8162 		  TREE_NO_WARNING (var) = 1;
8163 		if (TREE_CODE (type) != POINTER_TYPE)
8164 		  t = fold_convert (pointer_sized_int_node, t);
8165 		t = fold_convert (TREE_TYPE (x), t);
8166 		gimplify_assign (x, t, &ilist);
8167 	      }
8168 	    else if (omp_is_reference (var))
8169 	      gimplify_assign (x, var, &ilist);
8170 	    else if (is_gimple_reg (var))
8171 	      {
8172 		tree avar = create_tmp_var (TREE_TYPE (var));
8173 		mark_addressable (avar);
8174 		if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8175 		  TREE_NO_WARNING (var) = 1;
8176 		gimplify_assign (avar, var, &ilist);
8177 		avar = build_fold_addr_expr (avar);
8178 		gimplify_assign (x, avar, &ilist);
8179 	      }
8180 	    else
8181 	      {
8182 		var = build_fold_addr_expr (var);
8183 		gimplify_assign (x, var, &ilist);
8184 	      }
8185 	    if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
8186 	      s = size_int (0);
8187 	    else if (omp_is_reference (ovar))
8188 	      s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8189 	    else
8190 	      s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
8191 	    s = fold_convert (size_type_node, s);
8192 	    purpose = size_int (map_idx++);
8193 	    CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8194 	    if (TREE_CODE (s) != INTEGER_CST)
8195 	      TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
8196 
8197 	    gcc_checking_assert (tkind
8198 				 < (HOST_WIDE_INT_C (1U) << talign_shift));
8199 	    talign = ceil_log2 (talign);
8200 	    tkind |= talign << talign_shift;
8201 	    gcc_checking_assert (tkind
8202 				 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8203 	    CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8204 				    build_int_cstu (tkind_type, tkind));
8205 	    break;
8206 
8207 	  case OMP_CLAUSE_USE_DEVICE_PTR:
8208 	  case OMP_CLAUSE_IS_DEVICE_PTR:
8209 	    ovar = OMP_CLAUSE_DECL (c);
8210 	    var = lookup_decl_in_outer_ctx (ovar, ctx);
8211 	    x = build_sender_ref (ovar, ctx);
8212 	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8213 	      tkind = GOMP_MAP_USE_DEVICE_PTR;
8214 	    else
8215 	      tkind = GOMP_MAP_FIRSTPRIVATE_INT;
8216 	    type = TREE_TYPE (ovar);
8217 	    if (TREE_CODE (type) == ARRAY_TYPE)
8218 	      var = build_fold_addr_expr (var);
8219 	    else
8220 	      {
8221 		if (omp_is_reference (ovar))
8222 		  {
8223 		    type = TREE_TYPE (type);
8224 		    if (TREE_CODE (type) != ARRAY_TYPE)
8225 		      var = build_simple_mem_ref (var);
8226 		    var = fold_convert (TREE_TYPE (x), var);
8227 		  }
8228 	      }
8229 	    gimplify_assign (x, var, &ilist);
8230 	    s = size_int (0);
8231 	    purpose = size_int (map_idx++);
8232 	    CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8233 	    gcc_checking_assert (tkind
8234 				 < (HOST_WIDE_INT_C (1U) << talign_shift));
8235 	    gcc_checking_assert (tkind
8236 				 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8237 	    CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8238 				    build_int_cstu (tkind_type, tkind));
8239 	    break;
8240 	  }
8241 
8242       gcc_assert (map_idx == map_cnt);
8243 
8244       DECL_INITIAL (TREE_VEC_ELT (t, 1))
8245 	= build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
8246       DECL_INITIAL (TREE_VEC_ELT (t, 2))
8247 	= build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
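      /* If the sizes or kinds array could not be made static (some
	 element is only known at run time), emit a DECL_EXPR so the
	 array is allocated and initialized on the stack, and clobber
	 it once the region is done.  */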
8248       for (int i = 1; i <= 2; i++)
8249 	if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
8250 	  {
8251 	    gimple_seq initlist = NULL;
8252 	    force_gimple_operand (build1 (DECL_EXPR, void_type_node,
8253 					  TREE_VEC_ELT (t, i)),
8254 				  &initlist, true, NULL_TREE);
8255 	    gimple_seq_add_seq (&ilist, initlist);
8256 
8257 	    tree clobber = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, i)),
8258 					      NULL);
8259 	    TREE_THIS_VOLATILE (clobber) = 1;
8260 	    gimple_seq_add_stmt (&olist,
8261 				 gimple_build_assign (TREE_VEC_ELT (t, i),
8262 						      clobber));
8263 	  }
8264 
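      /* An empty CONSTRUCTOR with TREE_THIS_VOLATILE set is GIMPLE's
	 representation of a clobber; it marks the end of the sender
	 record's live range so its stack slot can be reused.  */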
8265       tree clobber = build_constructor (ctx->record_type, NULL);
8266       TREE_THIS_VOLATILE (clobber) = 1;
8267       gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
8268 							clobber));
8269     }
8270 
8271   /* Once all the expansions are done, sequence all the different
8272      fragments inside gimple_omp_body.  */
8273 
8274   new_body = NULL;
8275 
8276   if (offloaded
8277       && ctx->record_type)
8278     {
8279       t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
8280       /* fixup_child_record_type might have changed receiver_decl's type.  */
8281       t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
8282       gimple_seq_add_stmt (&new_body,
8283 	  		   gimple_build_assign (ctx->receiver_decl, t));
8284     }
8285   gimple_seq_add_seq (&new_body, fplist);
8286 
8287   if (offloaded || data_region)
8288     {
8289       tree prev = NULL_TREE;
8290       for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
8291 	switch (OMP_CLAUSE_CODE (c))
8292 	  {
8293 	    tree var, x;
8294 	  default:
8295 	    break;
8296 	  case OMP_CLAUSE_FIRSTPRIVATE:
8297 	    if (is_gimple_omp_oacc (ctx->stmt))
8298 	      break;
8299 	    var = OMP_CLAUSE_DECL (c);
8300 	    if (omp_is_reference (var)
8301 		|| is_gimple_reg_type (TREE_TYPE (var)))
8302 	      {
8303 		tree new_var = lookup_decl (var, ctx);
8304 		tree type;
8305 		type = TREE_TYPE (var);
8306 		if (omp_is_reference (var))
8307 		  type = TREE_TYPE (type);
8308 		if ((INTEGRAL_TYPE_P (type)
8309 		     && TYPE_PRECISION (type) <= POINTER_SIZE)
8310 		    || TREE_CODE (type) == POINTER_TYPE)
8311 		  {
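		    /* Mirror of GOMP_MAP_FIRSTPRIVATE_INT on the
		       receiving side: the value arrived in the pointer
		       slot itself, so convert it back rather than
		       dereferencing it.  */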
8312 		    x = build_receiver_ref (var, false, ctx);
8313 		    if (TREE_CODE (type) != POINTER_TYPE)
8314 		      x = fold_convert (pointer_sized_int_node, x);
8315 		    x = fold_convert (type, x);
8316 		    gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8317 				   fb_rvalue);
8318 		    if (omp_is_reference (var))
8319 		      {
8320 			tree v = create_tmp_var_raw (type, get_name (var));
8321 			gimple_add_tmp_var (v);
8322 			TREE_ADDRESSABLE (v) = 1;
8323 			gimple_seq_add_stmt (&new_body,
8324 					     gimple_build_assign (v, x));
8325 			x = build_fold_addr_expr (v);
8326 		      }
8327 		    gimple_seq_add_stmt (&new_body,
8328 					 gimple_build_assign (new_var, x));
8329 		  }
8330 		else
8331 		  {
8332 		    x = build_receiver_ref (var, !omp_is_reference (var), ctx);
8333 		    gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8334 				   fb_rvalue);
8335 		    gimple_seq_add_stmt (&new_body,
8336 					 gimple_build_assign (new_var, x));
8337 		  }
8338 	      }
8339 	    else if (is_variable_sized (var))
8340 	      {
8341 		tree pvar = DECL_VALUE_EXPR (var);
8342 		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8343 		pvar = TREE_OPERAND (pvar, 0);
8344 		gcc_assert (DECL_P (pvar));
8345 		tree new_var = lookup_decl (pvar, ctx);
8346 		x = build_receiver_ref (var, false, ctx);
8347 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8348 		gimple_seq_add_stmt (&new_body,
8349 				     gimple_build_assign (new_var, x));
8350 	      }
8351 	    break;
8352 	  case OMP_CLAUSE_PRIVATE:
8353 	    if (is_gimple_omp_oacc (ctx->stmt))
8354 	      break;
8355 	    var = OMP_CLAUSE_DECL (c);
8356 	    if (omp_is_reference (var))
8357 	      {
8358 		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8359 		tree new_var = lookup_decl (var, ctx);
8360 		x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8361 		if (TREE_CONSTANT (x))
8362 		  {
8363 		    x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
8364 					    get_name (var));
8365 		    gimple_add_tmp_var (x);
8366 		    TREE_ADDRESSABLE (x) = 1;
8367 		    x = build_fold_addr_expr_loc (clause_loc, x);
8368 		  }
8369 		else
8370 		  break;
8371 
8372 		x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8373 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8374 		gimple_seq_add_stmt (&new_body,
8375 				     gimple_build_assign (new_var, x));
8376 	      }
8377 	    break;
8378 	  case OMP_CLAUSE_USE_DEVICE_PTR:
8379 	  case OMP_CLAUSE_IS_DEVICE_PTR:
8380 	    var = OMP_CLAUSE_DECL (c);
8381 	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8382 	      x = build_sender_ref (var, ctx);
8383 	    else
8384 	      x = build_receiver_ref (var, false, ctx);
8385 	    if (is_variable_sized (var))
8386 	      {
8387 		tree pvar = DECL_VALUE_EXPR (var);
8388 		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8389 		pvar = TREE_OPERAND (pvar, 0);
8390 		gcc_assert (DECL_P (pvar));
8391 		tree new_var = lookup_decl (pvar, ctx);
8392 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8393 		gimple_seq_add_stmt (&new_body,
8394 				     gimple_build_assign (new_var, x));
8395 	      }
8396 	    else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
8397 	      {
8398 		tree new_var = lookup_decl (var, ctx);
8399 		new_var = DECL_VALUE_EXPR (new_var);
8400 		gcc_assert (TREE_CODE (new_var) == MEM_REF);
8401 		new_var = TREE_OPERAND (new_var, 0);
8402 		gcc_assert (DECL_P (new_var));
8403 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8404 		gimple_seq_add_stmt (&new_body,
8405 				     gimple_build_assign (new_var, x));
8406 	      }
8407 	    else
8408 	      {
8409 		tree type = TREE_TYPE (var);
8410 		tree new_var = lookup_decl (var, ctx);
8411 		if (omp_is_reference (var))
8412 		  {
8413 		    type = TREE_TYPE (type);
8414 		    if (TREE_CODE (type) != ARRAY_TYPE)
8415 		      {
8416 			tree v = create_tmp_var_raw (type, get_name (var));
8417 			gimple_add_tmp_var (v);
8418 			TREE_ADDRESSABLE (v) = 1;
8419 			x = fold_convert (type, x);
8420 			gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8421 				       fb_rvalue);
8422 			gimple_seq_add_stmt (&new_body,
8423 					     gimple_build_assign (v, x));
8424 			x = build_fold_addr_expr (v);
8425 		      }
8426 		  }
8427 		new_var = DECL_VALUE_EXPR (new_var);
8428 		x = fold_convert (TREE_TYPE (new_var), x);
8429 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8430 		gimple_seq_add_stmt (&new_body,
8431 				     gimple_build_assign (new_var, x));
8432 	      }
8433 	    break;
8434 	  }
8435       /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in a second pass,
8436 	 so that any firstprivate vars needed to hold OMP_CLAUSE_SIZE have
8437 	 already been handled.  Likewise OMP_CLAUSE_PRIVATE for VLAs or
8438 	 references to VLAs.  */
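      /* For instance (an illustrative sketch), given

	   int *p;
	   #pragma omp target map(p[0:n])

	 the gimplifier emits a GOMP_MAP_FIRSTPRIVATE_POINTER clause for P
	 right after the mapping of the array section, and the loop below
	 rebuilds the device-side P from the received section address,
	 subtracting any bias recorded in OMP_CLAUSE_SIZE.  */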
8439       for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
8440 	switch (OMP_CLAUSE_CODE (c))
8441 	  {
8442 	    tree var;
8443 	  default:
8444 	    break;
8445 	  case OMP_CLAUSE_MAP:
8446 	    if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8447 		|| OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8448 	      {
8449 		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8450 		HOST_WIDE_INT offset = 0;
8451 		gcc_assert (prev);
8452 		var = OMP_CLAUSE_DECL (c);
8453 		if (DECL_P (var)
8454 		    && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
8455 		    && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
8456 								      ctx))
8457 		    && varpool_node::get_create (var)->offloadable)
8458 		  break;
8459 		if (TREE_CODE (var) == INDIRECT_REF
8460 		    && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
8461 		  var = TREE_OPERAND (var, 0);
8462 		if (TREE_CODE (var) == COMPONENT_REF)
8463 		  {
8464 		    var = get_addr_base_and_unit_offset (var, &offset);
8465 		    gcc_assert (var != NULL_TREE && DECL_P (var));
8466 		  }
8467 		else if (DECL_SIZE (var)
8468 			 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
8469 		  {
8470 		    tree var2 = DECL_VALUE_EXPR (var);
8471 		    gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
8472 		    var2 = TREE_OPERAND (var2, 0);
8473 		    gcc_assert (DECL_P (var2));
8474 		    var = var2;
8475 		  }
8476 		tree new_var = lookup_decl (var, ctx), x;
8477 		tree type = TREE_TYPE (new_var);
8478 		bool is_ref;
8479 		if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
8480 		    && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8481 			== COMPONENT_REF))
8482 		  {
8483 		    type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
8484 		    is_ref = true;
8485 		    new_var = build2 (MEM_REF, type,
8486 				      build_fold_addr_expr (new_var),
8487 				      build_int_cst (build_pointer_type (type),
8488 						     offset));
8489 		  }
8490 		else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
8491 		  {
8492 		    type = TREE_TYPE (OMP_CLAUSE_DECL (c));
8493 		    is_ref = TREE_CODE (type) == REFERENCE_TYPE;
8494 		    new_var = build2 (MEM_REF, type,
8495 				      build_fold_addr_expr (new_var),
8496 				      build_int_cst (build_pointer_type (type),
8497 						     offset));
8498 		  }
8499 		else
8500 		  is_ref = omp_is_reference (var);
8501 		if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8502 		  is_ref = false;
8503 		bool ref_to_array = false;
8504 		if (is_ref)
8505 		  {
8506 		    type = TREE_TYPE (type);
8507 		    if (TREE_CODE (type) == ARRAY_TYPE)
8508 		      {
8509 			type = build_pointer_type (type);
8510 			ref_to_array = true;
8511 		      }
8512 		  }
8513 		else if (TREE_CODE (type) == ARRAY_TYPE)
8514 		  {
8515 		    tree decl2 = DECL_VALUE_EXPR (new_var);
8516 		    gcc_assert (TREE_CODE (decl2) == MEM_REF);
8517 		    decl2 = TREE_OPERAND (decl2, 0);
8518 		    gcc_assert (DECL_P (decl2));
8519 		    new_var = decl2;
8520 		    type = TREE_TYPE (new_var);
8521 		  }
8522 		x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
8523 		x = fold_convert_loc (clause_loc, type, x);
8524 		if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
8525 		  {
8526 		    tree bias = OMP_CLAUSE_SIZE (c);
8527 		    if (DECL_P (bias))
8528 		      bias = lookup_decl (bias, ctx);
8529 		    bias = fold_convert_loc (clause_loc, sizetype, bias);
8530 		    bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
8531 					    bias);
8532 		    x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
8533 					 TREE_TYPE (x), x, bias);
8534 		  }
8535 		if (ref_to_array)
8536 		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8537 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8538 		if (is_ref && !ref_to_array)
8539 		  {
8540 		    tree t = create_tmp_var_raw (type, get_name (var));
8541 		    gimple_add_tmp_var (t);
8542 		    TREE_ADDRESSABLE (t) = 1;
8543 		    gimple_seq_add_stmt (&new_body,
8544 					 gimple_build_assign (t, x));
8545 		    x = build_fold_addr_expr_loc (clause_loc, t);
8546 		  }
8547 		gimple_seq_add_stmt (&new_body,
8548 				     gimple_build_assign (new_var, x));
8549 		prev = NULL_TREE;
8550 	      }
8551 	    else if (OMP_CLAUSE_CHAIN (c)
8552 		     && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
8553 			== OMP_CLAUSE_MAP
8554 		     && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8555 			 == GOMP_MAP_FIRSTPRIVATE_POINTER
8556 			 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8557 			     == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
8558 	      prev = c;
8559 	    break;
8560 	  case OMP_CLAUSE_PRIVATE:
8561 	    var = OMP_CLAUSE_DECL (c);
8562 	    if (is_variable_sized (var))
8563 	      {
8564 		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8565 		tree new_var = lookup_decl (var, ctx);
8566 		tree pvar = DECL_VALUE_EXPR (var);
8567 		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8568 		pvar = TREE_OPERAND (pvar, 0);
8569 		gcc_assert (DECL_P (pvar));
8570 		tree new_pvar = lookup_decl (pvar, ctx);
8571 		tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8572 		tree al = size_int (DECL_ALIGN (var));
8573 		tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
8574 		x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8575 		x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
8576 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8577 		gimple_seq_add_stmt (&new_body,
8578 				     gimple_build_assign (new_pvar, x));
8579 	      }
8580 	    else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
8581 	      {
8582 		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8583 		tree new_var = lookup_decl (var, ctx);
8584 		tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8585 		if (TREE_CONSTANT (x))
8586 		  break;
8587 		else
8588 		  {
8589 		    tree atmp
8590 		      = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8591 		    tree rtype = TREE_TYPE (TREE_TYPE (new_var));
8592 		    tree al = size_int (TYPE_ALIGN (rtype));
8593 		    x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8594 		  }
8595 
8596 		x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8597 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8598 		gimple_seq_add_stmt (&new_body,
8599 				     gimple_build_assign (new_var, x));
8600 	      }
8601 	    break;
8602 	  }
8603 
8604       gimple_seq fork_seq = NULL;
8605       gimple_seq join_seq = NULL;
8606 
8607       if (is_oacc_parallel (ctx))
8608 	{
8609 	  /* If there are reductions on the offloaded region itself, treat
8610 	     them as a dummy GANG loop.  */
8611 	  tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
8612 
8613 	  lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
8614 				 false, NULL, NULL, &fork_seq, &join_seq, ctx);
8615 	}
8616 
8617       gimple_seq_add_seq (&new_body, fork_seq);
8618       gimple_seq_add_seq (&new_body, tgt_body);
8619       gimple_seq_add_seq (&new_body, join_seq);
8620 
8621       if (offloaded)
8622 	new_body = maybe_catch_exception (new_body);
8623 
8624       gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
8625       gimple_omp_set_body (stmt, new_body);
8626     }
8627 
8628   bind = gimple_build_bind (NULL, NULL,
8629 			    tgt_bind ? gimple_bind_block (tgt_bind)
8630 				     : NULL_TREE);
8631   gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
8632   gimple_bind_add_seq (bind, ilist);
8633   gimple_bind_add_stmt (bind, stmt);
8634   gimple_bind_add_seq (bind, olist);
8635 
8636   pop_gimplify_context (NULL);
8637 
8638   if (dep_bind)
8639     {
8640       gimple_bind_add_seq (dep_bind, dep_ilist);
8641       gimple_bind_add_stmt (dep_bind, bind);
8642       gimple_bind_add_seq (dep_bind, dep_olist);
8643       pop_gimplify_context (dep_bind);
8644     }
8645 }
8646 
8647 /* Lower code for an OpenMP teams directive.  */
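
/* For example (an illustrative sketch, not the exact GIMPLE), the body of

     #pragma omp teams num_teams(4) thread_limit(8)

   is preceded by a call to the libgomp entry point

     __builtin_GOMP_teams (4, 8);

   where 0 for either argument leaves the choice to the runtime.  */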
8648 
8649 static void
8650 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8651 {
8652   gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
8653   push_gimplify_context ();
8654 
8655   tree block = make_node (BLOCK);
8656   gbind *bind = gimple_build_bind (NULL, NULL, block);
8657   gsi_replace (gsi_p, bind, true);
8658   gimple_seq bind_body = NULL;
8659   gimple_seq dlist = NULL;
8660   gimple_seq olist = NULL;
8661 
8662   tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
8663 				    OMP_CLAUSE_NUM_TEAMS);
8664   if (num_teams == NULL_TREE)
8665     num_teams = build_int_cst (unsigned_type_node, 0);
8666   else
8667     {
8668       num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
8669       num_teams = fold_convert (unsigned_type_node, num_teams);
8670       gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
8671     }
8672   tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
8673 				       OMP_CLAUSE_THREAD_LIMIT);
8674   if (thread_limit == NULL_TREE)
8675     thread_limit = build_int_cst (unsigned_type_node, 0);
8676   else
8677     {
8678       thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
8679       thread_limit = fold_convert (unsigned_type_node, thread_limit);
8680       gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
8681 		     fb_rvalue);
8682     }
8683 
8684   lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
8685 			   &bind_body, &dlist, ctx, NULL);
8686   lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
8687   lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist, ctx);
8688   if (!gimple_omp_teams_grid_phony (teams_stmt))
8689     {
8690       gimple_seq_add_stmt (&bind_body, teams_stmt);
8691       location_t loc = gimple_location (teams_stmt);
8692       tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
8693       gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
8694       gimple_set_location (call, loc);
8695       gimple_seq_add_stmt (&bind_body, call);
8696     }
8697 
8698   gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
8699   gimple_omp_set_body (teams_stmt, NULL);
8700   gimple_seq_add_seq (&bind_body, olist);
8701   gimple_seq_add_seq (&bind_body, dlist);
8702   if (!gimple_omp_teams_grid_phony (teams_stmt))
8703     gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
8704   gimple_bind_set_body (bind, bind_body);
8705 
8706   pop_gimplify_context (bind);
8707 
8708   gimple_bind_append_vars (bind, ctx->block_vars);
8709   BLOCK_VARS (block) = ctx->block_vars;
8710   if (BLOCK_VARS (block))
8711     TREE_USED (block) = 1;
8712 }
8713 
8714 /* Lower code within an artificial GIMPLE_OMP_GRID_BODY OMP construct.  */
8715 
8716 static void
8717 lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8718 {
8719   gimple *stmt = gsi_stmt (*gsi_p);
8720   lower_omp (gimple_omp_body_ptr (stmt), ctx);
8721   gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
8722 		       gimple_build_omp_return (false));
8723 }
8724 
8725 
8726 /* Callback for lower_omp_1.  Return non-NULL if *tp needs to be
8727    regimplified.  If DATA is non-NULL, lower_omp_1 is outside of
8728    an OMP context, but with task_shared_vars set.  */
8729 
8730 static tree
8731 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
8732     			void *data)
8733 {
8734   tree t = *tp;
8735 
8736   /* Any variable with DECL_VALUE_EXPR needs to be regimplified.  */
8737   if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
8738     return t;
8739 
8740   if (task_shared_vars
8741       && DECL_P (t)
8742       && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
8743     return t;
8744 
8745   /* If a global variable has been privatized, TREE_CONSTANT on
8746      ADDR_EXPR might be wrong.  */
8747   if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
8748     recompute_tree_invariant_for_addr_expr (t);
8749 
8750   *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
8751   return NULL_TREE;
8752 }
8753 
8754 /* Data to be communicated between lower_omp_regimplify_operands and
8755    lower_omp_regimplify_operands_p.  */
8756 
8757 struct lower_omp_regimplify_operands_data
8758 {
8759   omp_context *ctx;
8760   vec<tree> *decls;
8761 };
8762 
8763 /* Helper function for lower_omp_regimplify_operands.  Find
8764    omp_member_access_dummy_var vars and temporarily adjust their
8765    DECL_VALUE_EXPRs if needed.  */
8766 
8767 static tree
8768 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
8769 				 void *data)
8770 {
8771   tree t = omp_member_access_dummy_var (*tp);
8772   if (t)
8773     {
8774       struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
8775       lower_omp_regimplify_operands_data *ldata
8776 	= (lower_omp_regimplify_operands_data *) wi->info;
8777       tree o = maybe_lookup_decl (t, ldata->ctx);
8778       if (o != t)
8779 	{
8780 	  ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
8781 	  ldata->decls->safe_push (*tp);
8782 	  tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
8783 	  SET_DECL_VALUE_EXPR (*tp, v);
8784 	}
8785     }
8786   *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
8787   return NULL_TREE;
8788 }
8789 
8790 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
8791    of omp_member_access_dummy_var vars during regimplification.  */
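
/* The DECLS vector collects (saved DECL_VALUE_EXPR, VAR) pairs pushed
   by lower_omp_regimplify_operands_p, so that the temporary remapping
   can be undone once gimple_regimplify_operands has run.  */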
8792 
8793 static void
8794 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
8795 			       gimple_stmt_iterator *gsi_p)
8796 {
8797   auto_vec<tree, 10> decls;
8798   if (ctx)
8799     {
8800       struct walk_stmt_info wi;
8801       memset (&wi, '\0', sizeof (wi));
8802       struct lower_omp_regimplify_operands_data data;
8803       data.ctx = ctx;
8804       data.decls = &decls;
8805       wi.info = &data;
8806       walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
8807     }
8808   gimple_regimplify_operands (stmt, gsi_p);
8809   while (!decls.is_empty ())
8810     {
8811       tree t = decls.pop ();
8812       tree v = decls.pop ();
8813       SET_DECL_VALUE_EXPR (t, v);
8814     }
8815 }
8816 
8817 static void
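/* Lower the statement pointed to by GSI_P within OMP context CTX,
   dispatching on its GIMPLE code; non-OMP statements have their
   operands regimplified when data-sharing rewrites require it.  */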
8818 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8819 {
8820   gimple *stmt = gsi_stmt (*gsi_p);
8821   struct walk_stmt_info wi;
8822   gcall *call_stmt;
8823 
8824   if (gimple_has_location (stmt))
8825     input_location = gimple_location (stmt);
8826 
8827   if (task_shared_vars)
8828     memset (&wi, '\0', sizeof (wi));
8829 
8830   /* If we have issued syntax errors, avoid doing any heavy lifting.
8831      Just replace the OMP directives with a NOP to avoid
8832      confusing RTL expansion.  */
8833   if (seen_error () && is_gimple_omp (stmt))
8834     {
8835       gsi_replace (gsi_p, gimple_build_nop (), true);
8836       return;
8837     }
8838 
8839   switch (gimple_code (stmt))
8840     {
8841     case GIMPLE_COND:
8842       {
8843 	gcond *cond_stmt = as_a <gcond *> (stmt);
8844 	if ((ctx || task_shared_vars)
8845 	    && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
8846 			   lower_omp_regimplify_p,
8847 			   ctx ? NULL : &wi, NULL)
8848 		|| walk_tree (gimple_cond_rhs_ptr (cond_stmt),
8849 			      lower_omp_regimplify_p,
8850 			      ctx ? NULL : &wi, NULL)))
8851 	  lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
8852       }
8853       break;
8854     case GIMPLE_CATCH:
8855       lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
8856       break;
8857     case GIMPLE_EH_FILTER:
8858       lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
8859       break;
8860     case GIMPLE_TRY:
8861       lower_omp (gimple_try_eval_ptr (stmt), ctx);
8862       lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
8863       break;
8864     case GIMPLE_TRANSACTION:
8865       lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
8866 		 ctx);
8867       break;
8868     case GIMPLE_BIND:
8869       lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
8870       maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
8871       break;
8872     case GIMPLE_OMP_PARALLEL:
8873     case GIMPLE_OMP_TASK:
8874       ctx = maybe_lookup_ctx (stmt);
8875       gcc_assert (ctx);
8876       if (ctx->cancellable)
8877 	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8878       lower_omp_taskreg (gsi_p, ctx);
8879       break;
8880     case GIMPLE_OMP_FOR:
8881       ctx = maybe_lookup_ctx (stmt);
8882       gcc_assert (ctx);
8883       if (ctx->cancellable)
8884 	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8885       lower_omp_for (gsi_p, ctx);
8886       break;
8887     case GIMPLE_OMP_SECTIONS:
8888       ctx = maybe_lookup_ctx (stmt);
8889       gcc_assert (ctx);
8890       if (ctx->cancellable)
8891 	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8892       lower_omp_sections (gsi_p, ctx);
8893       break;
8894     case GIMPLE_OMP_SINGLE:
8895       ctx = maybe_lookup_ctx (stmt);
8896       gcc_assert (ctx);
8897       lower_omp_single (gsi_p, ctx);
8898       break;
8899     case GIMPLE_OMP_MASTER:
8900       ctx = maybe_lookup_ctx (stmt);
8901       gcc_assert (ctx);
8902       lower_omp_master (gsi_p, ctx);
8903       break;
8904     case GIMPLE_OMP_TASKGROUP:
8905       ctx = maybe_lookup_ctx (stmt);
8906       gcc_assert (ctx);
8907       lower_omp_taskgroup (gsi_p, ctx);
8908       break;
8909     case GIMPLE_OMP_ORDERED:
8910       ctx = maybe_lookup_ctx (stmt);
8911       gcc_assert (ctx);
8912       lower_omp_ordered (gsi_p, ctx);
8913       break;
8914     case GIMPLE_OMP_CRITICAL:
8915       ctx = maybe_lookup_ctx (stmt);
8916       gcc_assert (ctx);
8917       lower_omp_critical (gsi_p, ctx);
8918       break;
8919     case GIMPLE_OMP_ATOMIC_LOAD:
8920       if ((ctx || task_shared_vars)
8921 	  && walk_tree (gimple_omp_atomic_load_rhs_ptr (
8922 			  as_a <gomp_atomic_load *> (stmt)),
8923 			lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
8924 	lower_omp_regimplify_operands (ctx, stmt, gsi_p);
8925       break;
8926     case GIMPLE_OMP_TARGET:
8927       ctx = maybe_lookup_ctx (stmt);
8928       gcc_assert (ctx);
8929       lower_omp_target (gsi_p, ctx);
8930       break;
8931     case GIMPLE_OMP_TEAMS:
8932       ctx = maybe_lookup_ctx (stmt);
8933       gcc_assert (ctx);
8934       lower_omp_teams (gsi_p, ctx);
8935       break;
8936     case GIMPLE_OMP_GRID_BODY:
8937       ctx = maybe_lookup_ctx (stmt);
8938       gcc_assert (ctx);
8939       lower_omp_grid_body (gsi_p, ctx);
8940       break;
8941     case GIMPLE_CALL:
8942       tree fndecl;
8943       call_stmt = as_a <gcall *> (stmt);
8944       fndecl = gimple_call_fndecl (call_stmt);
8945       if (fndecl
8946 	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
8947 	switch (DECL_FUNCTION_CODE (fndecl))
8948 	  {
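	  /* Cancellation handling: in a cancellable region, GOMP_barrier
	     is rewritten to GOMP_barrier_cancel, and the boolean result
	     of the cancel-style calls is tested, branching to the
	     region's cancel label when cancellation is observed.  */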
8949 	  case BUILT_IN_GOMP_BARRIER:
8950 	    if (ctx == NULL)
8951 	      break;
8952 	    /* FALLTHRU */
8953 	  case BUILT_IN_GOMP_CANCEL:
8954 	  case BUILT_IN_GOMP_CANCELLATION_POINT:
8955 	    omp_context *cctx;
8956 	    cctx = ctx;
8957 	    if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
8958 	      cctx = cctx->outer;
8959 	    gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
8960 	    if (!cctx->cancellable)
8961 	      {
8962 		if (DECL_FUNCTION_CODE (fndecl)
8963 		    == BUILT_IN_GOMP_CANCELLATION_POINT)
8964 		  {
8965 		    stmt = gimple_build_nop ();
8966 		    gsi_replace (gsi_p, stmt, false);
8967 		  }
8968 		break;
8969 	      }
8970 	    if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
8971 	      {
8972 		fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
8973 		gimple_call_set_fndecl (call_stmt, fndecl);
8974 		gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
8975 	      }
8976 	    tree lhs;
8977 	    lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
8978 	    gimple_call_set_lhs (call_stmt, lhs);
8979 	    tree fallthru_label;
8980 	    fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
8981 	    gimple *g;
8982 	    g = gimple_build_label (fallthru_label);
8983 	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
8984 	    g = gimple_build_cond (NE_EXPR, lhs,
8985 				   fold_convert (TREE_TYPE (lhs),
8986 						 boolean_false_node),
8987 				   cctx->cancel_label, fallthru_label);
8988 	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
8989 	    break;
8990 	  default:
8991 	    break;
8992 	  }
8993       /* FALLTHRU */
8994     default:
8995       if ((ctx || task_shared_vars)
8996 	  && walk_gimple_op (stmt, lower_omp_regimplify_p,
8997 			     ctx ? NULL : &wi))
8998 	{
8999 	  /* Just remove clobbers; this should happen only if we have
9000 	     "privatized" local addressable variables in SIMD regions.
9001 	     The clobber isn't needed in that case, and gimplifying the
9002 	     address of the ARRAY_REF into a pointer and creating a
9003 	     MEM_REF-based clobber would create worse code than we get
9004 	     with the clobber dropped.  */
9005 	  if (gimple_clobber_p (stmt))
9006 	    {
9007 	      gsi_replace (gsi_p, gimple_build_nop (), true);
9008 	      break;
9009 	    }
9010 	  lower_omp_regimplify_operands (ctx, stmt, gsi_p);
9011 	}
9012       break;
9013     }
9014 }
9015 
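/* Lower all statements in the sequence *BODY within OMP context CTX,
   then fold statements inside offloading or taskreg regions that
   gimplification left unfolded.  */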
9016 static void
9017 lower_omp (gimple_seq *body, omp_context *ctx)
9018 {
9019   location_t saved_location = input_location;
9020   gimple_stmt_iterator gsi;
9021   for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
9022     lower_omp_1 (&gsi, ctx);
9023   /* During gimplification, we haven't folded statements inside offloading
9024      or taskreg regions (gimplify.c:maybe_fold_stmt); do that now.  */
9025   if (target_nesting_level || taskreg_nesting_level)
9026     for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
9027       fold_stmt (&gsi);
9028   input_location = saved_location;
9029 }
9030 
9031 /* Main entry point.  */
9032 
9033 static unsigned int
9034 execute_lower_omp (void)
9035 {
9036   gimple_seq body;
9037   int i;
9038   omp_context *ctx;
9039 
9040   /* This pass always runs, to provide PROP_gimple_lomp.
9041      But often, there is nothing to do.  */
9042   if (flag_cilkplus == 0 && flag_openacc == 0 && flag_openmp == 0
9043       && flag_openmp_simd == 0)
9044     return 0;
9045 
9046   all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
9047 				 delete_omp_context);
9048 
9049   body = gimple_body (current_function_decl);
9050 
9051   if (hsa_gen_requested_p ())
9052     omp_grid_gridify_all_targets (&body);
9053 
9054   scan_omp (&body, NULL);
9055   gcc_assert (taskreg_nesting_level == 0);
9056   FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
9057     finish_taskreg_scan (ctx);
9058   taskreg_contexts.release ();
9059 
9060   if (all_contexts->root)
9061     {
9062       if (task_shared_vars)
9063 	push_gimplify_context ();
9064       lower_omp (&body, NULL);
9065       if (task_shared_vars)
9066 	pop_gimplify_context (NULL);
9067     }
9068 
9069   if (all_contexts)
9070     {
9071       splay_tree_delete (all_contexts);
9072       all_contexts = NULL;
9073     }
9074   BITMAP_FREE (task_shared_vars);
9075 
9076   /* If the current function is a method, remove the artificial dummy
9077      VAR_DECLs created for non-static data member privatization; they aren't
9078      needed for debuginfo or anything else, have already been replaced
9079      everywhere in the IL, and cause problems with LTO.  */
9080   if (DECL_ARGUMENTS (current_function_decl)
9081       && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
9082       && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
9083 	  == POINTER_TYPE))
9084     remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
9085   return 0;
9086 }
9087 
9088 namespace {
9089 
9090 const pass_data pass_data_lower_omp =
9091 {
9092   GIMPLE_PASS, /* type */
9093   "omplower", /* name */
9094   OPTGROUP_OMP, /* optinfo_flags */
9095   TV_NONE, /* tv_id */
9096   PROP_gimple_any, /* properties_required */
9097   PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
9098   0, /* properties_destroyed */
9099   0, /* todo_flags_start */
9100   0, /* todo_flags_finish */
9101 };
9102 
9103 class pass_lower_omp : public gimple_opt_pass
9104 {
9105 public:
9106   pass_lower_omp (gcc::context *ctxt)
9107     : gimple_opt_pass (pass_data_lower_omp, ctxt)
9108   {}
9109 
9110   /* opt_pass methods: */
9111   virtual unsigned int execute (function *) { return execute_lower_omp (); }
9112 
9113 }; // class pass_lower_omp
9114 
9115 } // anon namespace
9116 
9117 gimple_opt_pass *
9118 make_pass_lower_omp (gcc::context *ctxt)
9119 {
9120   return new pass_lower_omp (ctxt);
9121 }
9122 
9123 /* The following is a utility to diagnose structured block violations.
9124    It is not part of the "omplower" pass, as that's invoked too late.  It
9125    should be invoked by the respective front ends after gimplification.  */
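
/* For instance (illustrative only), the following branch leaves an
   OpenMP structured block and is diagnosed here:

     #pragma omp parallel
     {
       goto bad;     (invalid branch out of the structured block)
     }
     bad: ;

   A branch into such a block is diagnosed likewise.  */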
9126 
9127 static splay_tree all_labels;
9128 
9129 /* Check for mismatched contexts and generate an error if needed.  Return
9130    true if an error is detected.  */
9131 
9132 static bool
9133 diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
9134 	       gimple *branch_ctx, gimple *label_ctx)
9135 {
9136   gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
9137   gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
9138 
9139   if (label_ctx == branch_ctx)
9140     return false;
9141 
9142   const char* kind = NULL;
9143 
9144   if (flag_cilkplus)
9145     {
9146       if ((branch_ctx
9147 	   && gimple_code (branch_ctx) == GIMPLE_OMP_FOR
9148 	   && gimple_omp_for_kind (branch_ctx) == GF_OMP_FOR_KIND_CILKSIMD)
9149 	  || (label_ctx
9150 	      && gimple_code (label_ctx) == GIMPLE_OMP_FOR
9151 	      && gimple_omp_for_kind (label_ctx) == GF_OMP_FOR_KIND_CILKSIMD))
9152 	kind = "Cilk Plus";
9153     }
9154   if (flag_openacc)
9155     {
9156       if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
9157 	  || (label_ctx && is_gimple_omp_oacc (label_ctx)))
9158 	{
9159 	  gcc_checking_assert (kind == NULL);
9160 	  kind = "OpenACC";
9161 	}
9162     }
9163   if (kind == NULL)
9164     {
9165       gcc_checking_assert (flag_openmp || flag_openmp_simd);
9166       kind = "OpenMP";
9167     }
9168 
9169   /* Previously we kept track of the label's entire context in diagnose_sb_[12]
9170      so we could traverse it and issue a correct "exit" or "enter" error
9171      message upon a structured block violation.
9172 
9173      We built that context as a list via tree_cons'ing, but there is
9174      no easy counterpart in gimple tuples.  It seems like far too much work
9175      for issuing exit/enter error messages.  If someone really misses the
9176      distinct error message... patches welcome.  */
9177 
9178 #if 0
9179   /* Try to avoid confusing the user by producing an error message
9180      with correct "exit" or "enter" verbiage.  We prefer "exit"
9181      unless we can show that LABEL_CTX is nested within BRANCH_CTX.  */
9182   if (branch_ctx == NULL)
9183     exit_p = false;
9184   else
9185     {
9186       while (label_ctx)
9187 	{
9188 	  if (TREE_VALUE (label_ctx) == branch_ctx)
9189 	    {
9190 	      exit_p = false;
9191 	      break;
9192 	    }
9193 	  label_ctx = TREE_CHAIN (label_ctx);
9194 	}
9195     }
9196 
9197   if (exit_p)
9198     error ("invalid exit from %s structured block", kind);
9199   else
9200     error ("invalid entry to %s structured block", kind);
9201 #endif
9202 
9203   /* If it's obvious we have an invalid entry, be specific about the error.  */
9204   if (branch_ctx == NULL)
9205     error ("invalid entry to %s structured block", kind);
9206   else
9207     {
9208       /* Otherwise, be vague and lazy, but efficient.  */
9209       error ("invalid branch to/from %s structured block", kind);
9210     }
9211 
9212   gsi_replace (gsi_p, gimple_build_nop (), false);
9213   return true;
9214 }
9215 
9216 /* Pass 1: Create a minimal tree of structured blocks, and record
9217    where each label is found.  */
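
/* For example (illustrative), given

     #pragma omp parallel
     { L1: ...; }
     L2: ...;

   pass 1 records L1 with the GIMPLE_OMP_PARALLEL statement as its
   context and L2 with the NULL (outermost) context; pass 2 then
   compares each branch's context with the recorded context of its
   destination label.  */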
9218 
9219 static tree
9220 diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9221     	       struct walk_stmt_info *wi)
9222 {
9223   gimple *context = (gimple *) wi->info;
9224   gimple *inner_context;
9225   gimple *stmt = gsi_stmt (*gsi_p);
9226 
9227   *handled_ops_p = true;
9228 
9229   switch (gimple_code (stmt))
9230     {
9231     WALK_SUBSTMTS;
9232 
9233     case GIMPLE_OMP_PARALLEL:
9234     case GIMPLE_OMP_TASK:
9235     case GIMPLE_OMP_SECTIONS:
9236     case GIMPLE_OMP_SINGLE:
9237     case GIMPLE_OMP_SECTION:
9238     case GIMPLE_OMP_MASTER:
9239     case GIMPLE_OMP_ORDERED:
9240     case GIMPLE_OMP_CRITICAL:
9241     case GIMPLE_OMP_TARGET:
9242     case GIMPLE_OMP_TEAMS:
9243     case GIMPLE_OMP_TASKGROUP:
9244       /* The minimal context here is just the current OMP construct.  */
9245       inner_context = stmt;
9246       wi->info = inner_context;
9247       walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
9248       wi->info = context;
9249       break;
9250 
9251     case GIMPLE_OMP_FOR:
9252       inner_context = stmt;
9253       wi->info = inner_context;
9254       /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9255 	 walk them.  */
9256       walk_gimple_seq (gimple_omp_for_pre_body (stmt),
9257 	  	       diagnose_sb_1, NULL, wi);
9258       walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
9259       wi->info = context;
9260       break;
9261 
9262     case GIMPLE_LABEL:
9263       splay_tree_insert (all_labels,
9264 			 (splay_tree_key) gimple_label_label (
9265 					    as_a <glabel *> (stmt)),
9266 			 (splay_tree_value) context);
9267       break;
9268 
9269     default:
9270       break;
9271     }
9272 
9273   return NULL_TREE;
9274 }
9275 
9276 /* Pass 2: Check each branch and see if its context differs from that of
9277    the destination label's context.  */
9278 
9279 static tree
9280 diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9281     	       struct walk_stmt_info *wi)
9282 {
9283   gimple *context = (gimple *) wi->info;
9284   splay_tree_node n;
9285   gimple *stmt = gsi_stmt (*gsi_p);
9286 
9287   *handled_ops_p = true;
9288 
9289   switch (gimple_code (stmt))
9290     {
9291     WALK_SUBSTMTS;
9292 
9293     case GIMPLE_OMP_PARALLEL:
9294     case GIMPLE_OMP_TASK:
9295     case GIMPLE_OMP_SECTIONS:
9296     case GIMPLE_OMP_SINGLE:
9297     case GIMPLE_OMP_SECTION:
9298     case GIMPLE_OMP_MASTER:
9299     case GIMPLE_OMP_ORDERED:
9300     case GIMPLE_OMP_CRITICAL:
9301     case GIMPLE_OMP_TARGET:
9302     case GIMPLE_OMP_TEAMS:
9303     case GIMPLE_OMP_TASKGROUP:
9304       wi->info = stmt;
9305       walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
9306       wi->info = context;
9307       break;
9308 
9309     case GIMPLE_OMP_FOR:
9310       wi->info = stmt;
9311       /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9312 	 walk them.  */
9313       walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
9314 			   diagnose_sb_2, NULL, wi);
9315       walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
9316       wi->info = context;
9317       break;
9318 
9319     case GIMPLE_COND:
9320 	{
9321 	  gcond *cond_stmt = as_a <gcond *> (stmt);
9322 	  tree lab = gimple_cond_true_label (cond_stmt);
9323 	  if (lab)
9324 	    {
9325 	      n = splay_tree_lookup (all_labels,
9326 				     (splay_tree_key) lab);
9327 	      diagnose_sb_0 (gsi_p, context,
9328 			     n ? (gimple *) n->value : NULL);
9329 	    }
9330 	  lab = gimple_cond_false_label (cond_stmt);
9331 	  if (lab)
9332 	    {
9333 	      n = splay_tree_lookup (all_labels,
9334 				     (splay_tree_key) lab);
9335 	      diagnose_sb_0 (gsi_p, context,
9336 			     n ? (gimple *) n->value : NULL);
9337 	    }
9338 	}
9339       break;
9340 
9341     case GIMPLE_GOTO:
9342       {
9343 	tree lab = gimple_goto_dest (stmt);
9344 	if (TREE_CODE (lab) != LABEL_DECL)
9345 	  break;
9346 
9347 	n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
9348 	diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
9349       }
9350       break;
9351 
9352     case GIMPLE_SWITCH:
9353       {
9354 	gswitch *switch_stmt = as_a <gswitch *> (stmt);
9355 	unsigned int i;
9356 	for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
9357 	  {
9358 	    tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
9359 	    n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
9360 	    if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
9361 	      break;
9362 	  }
9363       }
9364       break;
9365 
9366     case GIMPLE_RETURN:
9367       diagnose_sb_0 (gsi_p, context, NULL);
9368       break;
9369 
9370     default:
9371       break;
9372     }
9373 
9374   return NULL_TREE;
9375 }
9376 
9377 static unsigned int
9378 diagnose_omp_structured_block_errors (void)
9379 {
9380   struct walk_stmt_info wi;
9381   gimple_seq body = gimple_body (current_function_decl);
9382 
9383   all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
9384 
9385   memset (&wi, 0, sizeof (wi));
9386   walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
9387 
9388   memset (&wi, 0, sizeof (wi));
9389   wi.want_locations = true;
9390   walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
9391 
9392   gimple_set_body (current_function_decl, body);
9393 
9394   splay_tree_delete (all_labels);
9395   all_labels = NULL;
9396 
9397   return 0;
9398 }
9399 
9400 namespace {
9401 
9402 const pass_data pass_data_diagnose_omp_blocks =
9403 {
9404   GIMPLE_PASS, /* type */
9405   "*diagnose_omp_blocks", /* name */
9406   OPTGROUP_OMP, /* optinfo_flags */
9407   TV_NONE, /* tv_id */
9408   PROP_gimple_any, /* properties_required */
9409   0, /* properties_provided */
9410   0, /* properties_destroyed */
9411   0, /* todo_flags_start */
9412   0, /* todo_flags_finish */
9413 };
9414 
9415 class pass_diagnose_omp_blocks : public gimple_opt_pass
9416 {
9417 public:
9418   pass_diagnose_omp_blocks (gcc::context *ctxt)
9419     : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
9420   {}
9421 
9422   /* opt_pass methods: */
9423   virtual bool gate (function *)
9424   {
9425     return flag_cilkplus || flag_openacc || flag_openmp || flag_openmp_simd;
9426   }
9427   virtual unsigned int execute (function *)
9428     {
9429       return diagnose_omp_structured_block_errors ();
9430     }
9431 
9432 }; // class pass_diagnose_omp_blocks
9433 
9434 } // anon namespace
9435 
9436 gimple_opt_pass *
9437 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
9438 {
9439   return new pass_diagnose_omp_blocks (ctxt);
9440 }
9441 
9442 
9443 #include "gt-omp-low.h"
9444