xref: /netbsd-src/external/gpl3/gcc/dist/gcc/c-family/c-omp.cc (revision b1e838363e3c6fc78a55519254d99869742dd33c)
1 /* This file contains routines to construct OpenACC and OpenMP constructs,
2    called from parsing in the C and C++ front ends.
3 
4    Copyright (C) 2005-2022 Free Software Foundation, Inc.
5    Contributed by Richard Henderson <rth@redhat.com>,
6 		  Diego Novillo <dnovillo@redhat.com>.
7 
8 This file is part of GCC.
9 
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 3, or (at your option) any later
13 version.
14 
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
18 for more details.
19 
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3.  If not see
22 <http://www.gnu.org/licenses/>.  */
23 
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "options.h"
28 #include "c-common.h"
29 #include "gimple-expr.h"
30 #include "c-pragma.h"
31 #include "stringpool.h"
32 #include "omp-general.h"
33 #include "gomp-constants.h"
34 #include "memmodel.h"
35 #include "attribs.h"
36 #include "gimplify.h"
37 #include "langhooks.h"
38 #include "bitmap.h"
39 #include "gimple-fold.h"
40 
41 
42 /* Complete a #pragma oacc wait construct.  LOC is the location of
43    the #pragma.  */
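/* As an informal sketch of what gets built: for a directive such as

     #pragma acc wait (1, 2) async (3)

   PARMS carries the wait arguments 1 and 2 and CLAUSES carries the async
   clause, so the result is roughly a call

     GOACC_wait (3, 2, 1, 2);

   i.e. the async queue (or GOMP_ASYNC_SYNC when no async clause is
   present), the number of wait arguments, then the arguments themselves.  */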
44 
45 tree
46 c_finish_oacc_wait (location_t loc, tree parms, tree clauses)
47 {
48   const int nparms = list_length (parms);
49   tree stmt, t;
50   vec<tree, va_gc> *args;
51 
52   vec_alloc (args, nparms + 2);
53   stmt = builtin_decl_explicit (BUILT_IN_GOACC_WAIT);
54 
55   if (omp_find_clause (clauses, OMP_CLAUSE_ASYNC))
56     t = OMP_CLAUSE_ASYNC_EXPR (clauses);
57   else
58     t = build_int_cst (integer_type_node, GOMP_ASYNC_SYNC);
59 
60   args->quick_push (t);
61   args->quick_push (build_int_cst (integer_type_node, nparms));
62 
63   for (t = parms; t; t = TREE_CHAIN (t))
64     {
65       if (TREE_CODE (OMP_CLAUSE_WAIT_EXPR (t)) == INTEGER_CST)
66 	args->quick_push (build_int_cst (integer_type_node,
67 			TREE_INT_CST_LOW (OMP_CLAUSE_WAIT_EXPR (t))));
68       else
69 	args->quick_push (OMP_CLAUSE_WAIT_EXPR (t));
70     }
71 
72   stmt = build_call_expr_loc_vec (loc, stmt, args);
73 
74   vec_free (args);
75 
76   return stmt;
77 }
78 
79 /* Complete a #pragma omp master construct.  STMT is the structured-block
80    that follows the pragma.  LOC is the location of the #pragma.  */
81 
82 tree
83 c_finish_omp_master (location_t loc, tree stmt)
84 {
85   tree t = add_stmt (build1 (OMP_MASTER, void_type_node, stmt));
86   SET_EXPR_LOCATION (t, loc);
87   return t;
88 }
89 
90 /* Complete a #pragma omp masked construct.  BODY is the structured-block
91    that follows the pragma.  LOC is the location of the #pragma.  */
92 
93 tree
94 c_finish_omp_masked (location_t loc, tree body, tree clauses)
95 {
96   tree stmt = make_node (OMP_MASKED);
97   TREE_TYPE (stmt) = void_type_node;
98   OMP_MASKED_BODY (stmt) = body;
99   OMP_MASKED_CLAUSES (stmt) = clauses;
100   SET_EXPR_LOCATION (stmt, loc);
101   return add_stmt (stmt);
102 }
103 
104 /* Complete a #pragma omp taskgroup construct.  BODY is the structured-block
105    that follows the pragma.  LOC is the location of the #pragma.  */
106 
107 tree
108 c_finish_omp_taskgroup (location_t loc, tree body, tree clauses)
109 {
110   tree stmt = make_node (OMP_TASKGROUP);
111   TREE_TYPE (stmt) = void_type_node;
112   OMP_TASKGROUP_BODY (stmt) = body;
113   OMP_TASKGROUP_CLAUSES (stmt) = clauses;
114   SET_EXPR_LOCATION (stmt, loc);
115   return add_stmt (stmt);
116 }
117 
118 /* Complete a #pragma omp critical construct.  BODY is the structured-block
119    that follows the pragma, NAME is the identifier in the pragma, or null
120    if it was omitted.  LOC is the location of the #pragma.  */
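/* For example,

     #pragma omp critical (mylock) hint (omp_sync_hint_contended)

   is accepted, whereas the same hint on an unnamed critical construct is
   rejected below, because only omp_sync_hint_none (0) is permitted when
   the name is omitted.  */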
121 
122 tree
123 c_finish_omp_critical (location_t loc, tree body, tree name, tree clauses)
124 {
125   gcc_assert (!clauses || OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_HINT);
126   if (name == NULL_TREE
127       && clauses != NULL_TREE
128       && integer_nonzerop (OMP_CLAUSE_HINT_EXPR (clauses)))
129     {
130       error_at (OMP_CLAUSE_LOCATION (clauses),
131 		"%<#pragma omp critical%> with %<hint%> clause requires "
132 		"a name, except when %<omp_sync_hint_none%> is used");
133       return error_mark_node;
134     }
135 
136   tree stmt = make_node (OMP_CRITICAL);
137   TREE_TYPE (stmt) = void_type_node;
138   OMP_CRITICAL_BODY (stmt) = body;
139   OMP_CRITICAL_NAME (stmt) = name;
140   OMP_CRITICAL_CLAUSES (stmt) = clauses;
141   SET_EXPR_LOCATION (stmt, loc);
142   return add_stmt (stmt);
143 }
144 
145 /* Complete a #pragma omp ordered construct.  STMT is the structured-block
146    that follows the pragma.  LOC is the location of the #pragma.  */
147 
148 tree
149 c_finish_omp_ordered (location_t loc, tree clauses, tree stmt)
150 {
151   tree t = make_node (OMP_ORDERED);
152   TREE_TYPE (t) = void_type_node;
153   OMP_ORDERED_BODY (t) = stmt;
154   if (!flag_openmp	/* flag_openmp_simd */
155       && (OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_SIMD
156 	  || OMP_CLAUSE_CHAIN (clauses)))
157     clauses = build_omp_clause (loc, OMP_CLAUSE_SIMD);
158   OMP_ORDERED_CLAUSES (t) = clauses;
159   SET_EXPR_LOCATION (t, loc);
160   return add_stmt (t);
161 }
162 
163 
164 /* Complete a #pragma omp barrier construct.  LOC is the location of
165    the #pragma.  */
166 
167 void
168 c_finish_omp_barrier (location_t loc)
169 {
170   tree x;
171 
172   x = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER);
173   x = build_call_expr_loc (loc, x, 0);
174   add_stmt (x);
175 }
176 
177 
178 /* Complete a #pragma omp taskwait construct.  LOC is the location of the
179    pragma.  */
180 
181 void
182 c_finish_omp_taskwait (location_t loc)
183 {
184   tree x;
185 
186   x = builtin_decl_explicit (BUILT_IN_GOMP_TASKWAIT);
187   x = build_call_expr_loc (loc, x, 0);
188   add_stmt (x);
189 }
190 
191 
192 /* Complete a #pragma omp taskyield construct.  LOC is the location of the
193    pragma.  */
194 
195 void
196 c_finish_omp_taskyield (location_t loc)
197 {
198   tree x;
199 
200   x = builtin_decl_explicit (BUILT_IN_GOMP_TASKYIELD);
201   x = build_call_expr_loc (loc, x, 0);
202   add_stmt (x);
203 }
204 
205 
206 /* Complete a #pragma omp atomic construct.  For CODE OMP_ATOMIC
207    the expression to be implemented atomically is LHS opcode= RHS.
208    For OMP_ATOMIC_READ V = LHS, for OMP_ATOMIC_CAPTURE_{NEW,OLD} LHS
209    opcode= RHS with the new or old content of LHS returned.
210    LOC is the location of the atomic statement.  The value returned
211    is either error_mark_node (if the construct was erroneous) or an
212    OMP_ATOMIC* node which should be added to the current statement
213    tree with add_stmt.  If TEST is set, avoid calling save_expr
214    or create_tmp_var*.  */
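/* As a rough sketch, the usual source forms map onto CODE as follows:

     #pragma omp atomic read
       v = x;                      CODE == OMP_ATOMIC_READ
     #pragma omp atomic update
       x += expr;                  CODE == OMP_ATOMIC
     #pragma omp atomic capture
       v = x += expr;              CODE == OMP_ATOMIC_CAPTURE_NEW
       { v = x; x += expr; }       CODE == OMP_ATOMIC_CAPTURE_OLD

   with OPCODE carrying the binary operator (PLUS_EXPR above) and V the
   capture variable.  */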
215 
216 tree
217 c_finish_omp_atomic (location_t loc, enum tree_code code,
218 		     enum tree_code opcode, tree lhs, tree rhs,
219 		     tree v, tree lhs1, tree rhs1, tree r, bool swapped,
220 		     enum omp_memory_order memory_order, bool weak,
221 		     bool test)
222 {
223   tree x, type, addr, pre = NULL_TREE, rtmp = NULL_TREE, vtmp = NULL_TREE;
224   HOST_WIDE_INT bitpos = 0, bitsize = 0;
225   enum tree_code orig_opcode = opcode;
226 
227   if (lhs == error_mark_node || rhs == error_mark_node
228       || v == error_mark_node || lhs1 == error_mark_node
229       || rhs1 == error_mark_node || r == error_mark_node)
230     return error_mark_node;
231 
232   /* ??? According to one reading of the OpenMP spec, complex types are
233      supported, but there are no atomic stores for any architecture.
234      But at least icc 9.0 doesn't support complex types here either.
235      And let's not even talk about vector types...  */
236   type = TREE_TYPE (lhs);
237   if (!INTEGRAL_TYPE_P (type)
238       && !POINTER_TYPE_P (type)
239       && !SCALAR_FLOAT_TYPE_P (type))
240     {
241       error_at (loc, "invalid expression type for %<#pragma omp atomic%>");
242       return error_mark_node;
243     }
244   if (TYPE_ATOMIC (type))
245     {
246       error_at (loc, "%<_Atomic%> expression in %<#pragma omp atomic%>");
247       return error_mark_node;
248     }
249   if (r && r != void_list_node && !INTEGRAL_TYPE_P (TREE_TYPE (r)))
250     {
251       error_at (loc, "%<#pragma omp atomic compare capture%> with non-integral "
252 		     "comparison result");
253       return error_mark_node;
254     }
255 
256   if (opcode == RDIV_EXPR)
257     opcode = TRUNC_DIV_EXPR;
258 
259   /* ??? Validate that rhs does not overlap lhs.  */
260   tree blhs = NULL;
261   if (TREE_CODE (lhs) == COMPONENT_REF
262       && TREE_CODE (TREE_OPERAND (lhs, 1)) == FIELD_DECL
263       && DECL_C_BIT_FIELD (TREE_OPERAND (lhs, 1))
264       && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (lhs, 1)))
265     {
266       tree field = TREE_OPERAND (lhs, 1);
267       tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
268       if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))
269 	  && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)))
270 	bitpos = (tree_to_uhwi (DECL_FIELD_OFFSET (field))
271 		  - tree_to_uhwi (DECL_FIELD_OFFSET (repr))) * BITS_PER_UNIT;
272       else
273 	bitpos = 0;
274       bitpos += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
275 		 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
276       gcc_assert (tree_fits_shwi_p (DECL_SIZE (field)));
277       bitsize = tree_to_shwi (DECL_SIZE (field));
278       blhs = lhs;
279       type = TREE_TYPE (repr);
280       lhs = build3 (COMPONENT_REF, TREE_TYPE (repr), TREE_OPERAND (lhs, 0),
281 		    repr, TREE_OPERAND (lhs, 2));
282     }
283 
284   /* Take and save the address of the lhs.  From then on we'll reference it
285      via indirection.  */
286   addr = build_unary_op (loc, ADDR_EXPR, lhs, false);
287   if (addr == error_mark_node)
288     return error_mark_node;
289   if (!test)
290     addr = save_expr (addr);
291   if (!test
292       && TREE_CODE (addr) != SAVE_EXPR
293       && (TREE_CODE (addr) != ADDR_EXPR
294 	  || !VAR_P (TREE_OPERAND (addr, 0))))
295     {
296       /* Make sure LHS is simple enough so that goa_lhs_expr_p can recognize
297 	 it even after unsharing function body.  */
298       tree var = create_tmp_var_raw (TREE_TYPE (addr));
299       DECL_CONTEXT (var) = current_function_decl;
300       addr = build4 (TARGET_EXPR, TREE_TYPE (addr), var, addr, NULL, NULL);
301     }
302   tree orig_lhs = lhs;
303   lhs = build_indirect_ref (loc, addr, RO_NULL);
304   tree new_lhs = lhs;
305 
306   if (code == OMP_ATOMIC_READ)
307     {
308       x = build1 (OMP_ATOMIC_READ, type, addr);
309       SET_EXPR_LOCATION (x, loc);
310       OMP_ATOMIC_MEMORY_ORDER (x) = memory_order;
311       gcc_assert (!weak);
312       if (blhs)
313 	x = build3_loc (loc, BIT_FIELD_REF, TREE_TYPE (blhs), x,
314 			bitsize_int (bitsize), bitsize_int (bitpos));
315       return build_modify_expr (loc, v, NULL_TREE, NOP_EXPR,
316 				loc, x, NULL_TREE);
317     }
318 
319   /* There are lots of warnings, errors, and conversions that need to happen
320      in the course of interpreting a statement.  Use the normal mechanisms
321      to do this, and then take it apart again.  */
322   if (blhs)
323     {
324       lhs = build3_loc (loc, BIT_FIELD_REF, TREE_TYPE (blhs), lhs,
325 			bitsize_int (bitsize), bitsize_int (bitpos));
326       if (opcode == COND_EXPR)
327 	{
328 	  bool save = in_late_binary_op;
329 	  in_late_binary_op = true;
330 	  std::swap (rhs, rhs1);
331 	  rhs1 = build_binary_op (loc, EQ_EXPR, lhs, rhs1, true);
332 	  in_late_binary_op = save;
333 	}
334       else if (swapped)
335 	rhs = build_binary_op (loc, opcode, rhs, lhs, true);
336       else if (opcode != NOP_EXPR)
337 	rhs = build_binary_op (loc, opcode, lhs, rhs, true);
338       opcode = NOP_EXPR;
339     }
340   else if (opcode == COND_EXPR)
341     {
342       bool save = in_late_binary_op;
343       in_late_binary_op = true;
344       std::swap (rhs, rhs1);
345       rhs1 = build_binary_op (loc, EQ_EXPR, lhs, rhs1, true);
346       in_late_binary_op = save;
347       opcode = NOP_EXPR;
348     }
349   else if (swapped)
350     {
351       rhs = build_binary_op (loc, opcode, rhs, lhs, true);
352       opcode = NOP_EXPR;
353     }
354   bool save = in_late_binary_op;
355   in_late_binary_op = true;
356   if ((opcode == MIN_EXPR || opcode == MAX_EXPR)
357       && build_binary_op (loc, LT_EXPR, blhs ? blhs : lhs, rhs,
358 			  true) == error_mark_node)
359     x = error_mark_node;
360   else
361     x = build_modify_expr (loc, blhs ? blhs : lhs, NULL_TREE, opcode,
362 			   loc, rhs, NULL_TREE);
363   in_late_binary_op = save;
364   if (x == error_mark_node)
365     return error_mark_node;
366   if (TREE_CODE (x) == COMPOUND_EXPR)
367     {
368       pre = TREE_OPERAND (x, 0);
369       gcc_assert (TREE_CODE (pre) == SAVE_EXPR || tree_invariant_p (pre));
370       x = TREE_OPERAND (x, 1);
371     }
372   gcc_assert (TREE_CODE (x) == MODIFY_EXPR);
373   rhs = TREE_OPERAND (x, 1);
374 
375   if (blhs)
376     rhs = build3_loc (loc, BIT_INSERT_EXPR, type, new_lhs,
377 		      rhs, bitsize_int (bitpos));
378   if (orig_opcode == COND_EXPR)
379     {
380       if (error_operand_p (rhs1))
381 	return error_mark_node;
382       gcc_assert (TREE_CODE (rhs1) == EQ_EXPR);
383       tree cmptype = TREE_TYPE (TREE_OPERAND (rhs1, 0));
384       if (SCALAR_FLOAT_TYPE_P (cmptype) && !test)
385 	{
386 	  bool clear_padding = false;
387 	  HOST_WIDE_INT non_padding_start = 0;
388 	  HOST_WIDE_INT non_padding_end = 0;
389 	  if (BITS_PER_UNIT == 8
390 	      && CHAR_BIT == 8
391 	      && clear_padding_type_may_have_padding_p (cmptype))
392 	    {
393 	      HOST_WIDE_INT sz = int_size_in_bytes (cmptype), i;
394 	      gcc_assert (sz > 0);
395 	      unsigned char *buf = XALLOCAVEC (unsigned char, sz);
396 	      memset (buf, ~0, sz);
397 	      clear_type_padding_in_mask (cmptype, buf);
398 	      for (i = 0; i < sz; i++)
399 		if (buf[i] != (unsigned char) ~0)
400 		  {
401 		    clear_padding = true;
402 		    break;
403 		  }
404 	      if (clear_padding && buf[i] == 0)
405 		{
406 		  /* Try to optimize.  In the common case where
407 		     non-padding bits are all continuous and start
408 		     and end at a byte boundary, we can just adjust
409 		     the memcmp call arguments and don't need to
410 		     emit __builtin_clear_padding calls.  */
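		  /* E.g. for x86 long double (assuming the usual layout
		     of 10 value bytes followed by 6 bytes of padding in
		     a 16-byte object) the mask computed above is
		     ff..ff 00..00, so non_padding_start stays 0,
		     non_padding_end becomes 10 and the memcmp below only
		     compares the first 10 bytes.  */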
411 		  if (i == 0)
412 		    {
413 		      for (i = 0; i < sz; i++)
414 			if (buf[i] != 0)
415 			  break;
416 		      if (i < sz && buf[i] == (unsigned char) ~0)
417 			{
418 			  non_padding_start = i;
419 			  for (; i < sz; i++)
420 			    if (buf[i] != (unsigned char) ~0)
421 			      break;
422 			}
423 		      else
424 			i = 0;
425 		    }
426 		  if (i != 0)
427 		    {
428 		      non_padding_end = i;
429 		      for (; i < sz; i++)
430 			if (buf[i] != 0)
431 			  {
432 			    non_padding_start = 0;
433 			    non_padding_end = 0;
434 			    break;
435 			  }
436 		    }
437 		}
438 	    }
439 	  tree inttype = NULL_TREE;
440 	  if (!clear_padding && tree_fits_uhwi_p (TYPE_SIZE (cmptype)))
441 	    {
442 	      HOST_WIDE_INT prec = tree_to_uhwi (TYPE_SIZE (cmptype));
443 	      inttype = c_common_type_for_size (prec, 1);
444 	      if (inttype
445 		  && (!tree_int_cst_equal (TYPE_SIZE (cmptype),
446 					   TYPE_SIZE (inttype))
447 		      || TYPE_PRECISION (inttype) != prec))
448 		inttype = NULL_TREE;
449 	    }
450 	  if (inttype)
451 	    {
452 	      TREE_OPERAND (rhs1, 0)
453 		= build1_loc (loc, VIEW_CONVERT_EXPR, inttype,
454 			      TREE_OPERAND (rhs1, 0));
455 	      TREE_OPERAND (rhs1, 1)
456 		= build1_loc (loc, VIEW_CONVERT_EXPR, inttype,
457 			      TREE_OPERAND (rhs1, 1));
458 	    }
459 	  else
460 	    {
461 	      tree pcmptype = build_pointer_type (cmptype);
462 	      tree tmp1 = create_tmp_var_raw (cmptype);
463 	      TREE_ADDRESSABLE (tmp1) = 1;
464 	      DECL_CONTEXT (tmp1) = current_function_decl;
465 	      tmp1 = build4 (TARGET_EXPR, cmptype, tmp1,
466 			     TREE_OPERAND (rhs1, 0), NULL, NULL);
467 	      tmp1 = build1 (ADDR_EXPR, pcmptype, tmp1);
468 	      tree tmp2 = create_tmp_var_raw (cmptype);
469 	      TREE_ADDRESSABLE (tmp2) = 1;
470 	      DECL_CONTEXT (tmp2) = current_function_decl;
471 	      tmp2 = build4 (TARGET_EXPR, cmptype, tmp2,
472 			     TREE_OPERAND (rhs1, 1), NULL, NULL);
473 	      tmp2 = build1 (ADDR_EXPR, pcmptype, tmp2);
474 	      if (non_padding_start)
475 		{
476 		  tmp1 = build2 (POINTER_PLUS_EXPR, pcmptype, tmp1,
477 				 size_int (non_padding_start));
478 		  tmp2 = build2 (POINTER_PLUS_EXPR, pcmptype, tmp2,
479 				 size_int (non_padding_start));
480 		}
481 	      tree fndecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
482 	      rhs1 = build_call_expr_loc (loc, fndecl, 3, tmp1, tmp2,
483 					  non_padding_end
484 					  ? size_int (non_padding_end
485 						      - non_padding_start)
486 					  : TYPE_SIZE_UNIT (cmptype));
487 	      rhs1 = build2 (EQ_EXPR, boolean_type_node, rhs1,
488 			     integer_zero_node);
489 	      if (clear_padding && non_padding_end == 0)
490 		{
491 		  fndecl = builtin_decl_explicit (BUILT_IN_CLEAR_PADDING);
492 		  tree cp1 = build_call_expr_loc (loc, fndecl, 1, tmp1);
493 		  tree cp2 = build_call_expr_loc (loc, fndecl, 1, tmp2);
494 		  rhs1 = omit_two_operands_loc (loc, boolean_type_node,
495 						rhs1, cp2, cp1);
496 		}
497 	    }
498 	}
499       if (r && test)
500 	rtmp = rhs1;
501       else if (r)
502 	{
503 	  tree var = create_tmp_var_raw (boolean_type_node);
504 	  DECL_CONTEXT (var) = current_function_decl;
505 	  rtmp = build4 (TARGET_EXPR, boolean_type_node, var,
506 			 boolean_false_node, NULL, NULL);
507 	  save = in_late_binary_op;
508 	  in_late_binary_op = true;
509 	  x = build_modify_expr (loc, var, NULL_TREE, NOP_EXPR,
510 				 loc, rhs1, NULL_TREE);
511 	  in_late_binary_op = save;
512 	  if (x == error_mark_node)
513 	    return error_mark_node;
514 	  gcc_assert (TREE_CODE (x) == MODIFY_EXPR
515 		      && TREE_OPERAND (x, 0) == var);
516 	  TREE_OPERAND (x, 0) = rtmp;
517 	  rhs1 = omit_one_operand_loc (loc, boolean_type_node, x, rtmp);
518 	}
519       rhs = build3_loc (loc, COND_EXPR, type, rhs1, rhs, new_lhs);
520       rhs1 = NULL_TREE;
521     }
522 
523   /* Punt the actual generation of atomic operations to common code.  */
524   if (code == OMP_ATOMIC)
525     type = void_type_node;
526   x = build2 (code, type, addr, rhs);
527   SET_EXPR_LOCATION (x, loc);
528   OMP_ATOMIC_MEMORY_ORDER (x) = memory_order;
529   OMP_ATOMIC_WEAK (x) = weak;
530 
531   /* Generally it is hard to prove rhs1 and lhs are the same memory
532      location, just diagnose different variables.  */
533   if (rhs1
534       && VAR_P (rhs1)
535       && VAR_P (orig_lhs)
536       && rhs1 != orig_lhs
537       && !test)
538     {
539       if (code == OMP_ATOMIC)
540 	error_at (loc, "%<#pragma omp atomic update%> uses two different "
541 		       "variables for memory");
542       else
543 	error_at (loc, "%<#pragma omp atomic capture%> uses two different "
544 		       "variables for memory");
545       return error_mark_node;
546     }
547 
548   if (lhs1
549       && lhs1 != orig_lhs
550       && TREE_CODE (lhs1) == COMPONENT_REF
551       && TREE_CODE (TREE_OPERAND (lhs1, 1)) == FIELD_DECL
552       && DECL_C_BIT_FIELD (TREE_OPERAND (lhs1, 1))
553       && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (lhs1, 1)))
554     {
555       tree field = TREE_OPERAND (lhs1, 1);
556       tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
557       lhs1 = build3 (COMPONENT_REF, TREE_TYPE (repr), TREE_OPERAND (lhs1, 0),
558 		     repr, TREE_OPERAND (lhs1, 2));
559     }
560   if (rhs1
561       && rhs1 != orig_lhs
562       && TREE_CODE (rhs1) == COMPONENT_REF
563       && TREE_CODE (TREE_OPERAND (rhs1, 1)) == FIELD_DECL
564       && DECL_C_BIT_FIELD (TREE_OPERAND (rhs1, 1))
565       && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (rhs1, 1)))
566     {
567       tree field = TREE_OPERAND (rhs1, 1);
568       tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
569       rhs1 = build3 (COMPONENT_REF, TREE_TYPE (repr), TREE_OPERAND (rhs1, 0),
570 		     repr, TREE_OPERAND (rhs1, 2));
571     }
572 
573   if (code != OMP_ATOMIC)
574     {
575       /* Generally it is hard to prove lhs1 and lhs are the same memory
576 	 location, just diagnose different variables.  */
577       if (lhs1 && VAR_P (lhs1) && VAR_P (orig_lhs))
578 	{
579 	  if (lhs1 != orig_lhs && !test)
580 	    {
581 	      error_at (loc, "%<#pragma omp atomic capture%> uses two "
582 			     "different variables for memory");
583 	      return error_mark_node;
584 	    }
585 	}
586       if (blhs)
587 	x = build3_loc (loc, BIT_FIELD_REF, TREE_TYPE (blhs), x,
588 			bitsize_int (bitsize), bitsize_int (bitpos));
589       if (r && !test)
590 	{
591 	  vtmp = create_tmp_var_raw (TREE_TYPE (x));
592 	  DECL_CONTEXT (vtmp) = current_function_decl;
593 	}
594       else
595 	vtmp = v;
596       x = build_modify_expr (loc, vtmp, NULL_TREE, NOP_EXPR,
597 			     loc, x, NULL_TREE);
598       if (x == error_mark_node)
599 	return error_mark_node;
600       type = TREE_TYPE (x);
601       if (r && !test)
602 	{
603 	  vtmp = build4 (TARGET_EXPR, TREE_TYPE (vtmp), vtmp,
604 			 build_zero_cst (TREE_TYPE (vtmp)), NULL, NULL);
605 	  gcc_assert (TREE_CODE (x) == MODIFY_EXPR
606 		      && TREE_OPERAND (x, 0) == TARGET_EXPR_SLOT (vtmp));
607 	  TREE_OPERAND (x, 0) = vtmp;
608 	}
609       if (rhs1 && rhs1 != orig_lhs)
610 	{
611 	  tree rhs1addr = build_unary_op (loc, ADDR_EXPR, rhs1, false);
612 	  if (rhs1addr == error_mark_node)
613 	    return error_mark_node;
614 	  x = omit_one_operand_loc (loc, type, x, rhs1addr);
615 	}
616       if (lhs1 && lhs1 != orig_lhs)
617 	{
618 	  tree lhs1addr = build_unary_op (loc, ADDR_EXPR, lhs1, false);
619 	  if (lhs1addr == error_mark_node)
620 	    return error_mark_node;
621 	  if (code == OMP_ATOMIC_CAPTURE_OLD)
622 	    x = omit_one_operand_loc (loc, type, x, lhs1addr);
623 	  else
624 	    {
625 	      if (!test)
626 		x = save_expr (x);
627 	      x = omit_two_operands_loc (loc, type, x, x, lhs1addr);
628 	    }
629 	}
630     }
631   else if (rhs1 && rhs1 != orig_lhs)
632     {
633       tree rhs1addr = build_unary_op (loc, ADDR_EXPR, rhs1, false);
634       if (rhs1addr == error_mark_node)
635 	return error_mark_node;
636       x = omit_one_operand_loc (loc, type, x, rhs1addr);
637     }
638 
639   if (pre)
640     x = omit_one_operand_loc (loc, type, x, pre);
641   if (r && r != void_list_node)
642     {
643       in_late_binary_op = true;
644       tree x2 = build_modify_expr (loc, r, NULL_TREE, NOP_EXPR,
645 				   loc, rtmp, NULL_TREE);
646       in_late_binary_op = save;
647       if (x2 == error_mark_node)
648 	return error_mark_node;
649       x = omit_one_operand_loc (loc, TREE_TYPE (x2), x2, x);
650     }
651   if (v && vtmp != v)
652     {
653       in_late_binary_op = true;
654       tree x2 = build_modify_expr (loc, v, NULL_TREE, NOP_EXPR,
655 				   loc, vtmp, NULL_TREE);
656       in_late_binary_op = save;
657       if (x2 == error_mark_node)
658 	return error_mark_node;
659       x2 = build3_loc (loc, COND_EXPR, void_type_node, rtmp,
660 		       void_node, x2);
661       x = omit_one_operand_loc (loc, TREE_TYPE (x2), x2, x);
662     }
663   return x;
664 }
665 
666 
667 /* Return true if TYPE is the implementation's omp_depend_t.  */
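/* For reference, libgomp's omp.h declares the type along the lines of

     typedef struct __attribute__ ((__aligned__ (sizeof (void *))))
     {
       char __omp_depend_t__[2 * sizeof (void *)];
     } omp_depend_t;

   (details may vary), so the checks below amount to: a complete struct
   named omp_depend_t, declared at file scope, whose size is twice the
   size of a pointer.  */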
668 
669 bool
670 c_omp_depend_t_p (tree type)
671 {
672   type = TYPE_MAIN_VARIANT (type);
673   return (TREE_CODE (type) == RECORD_TYPE
674 	  && TYPE_NAME (type)
675 	  && ((TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
676 	       ? DECL_NAME (TYPE_NAME (type)) : TYPE_NAME (type))
677 	      == get_identifier ("omp_depend_t"))
678 	  && (!TYPE_CONTEXT (type)
679 	      || TREE_CODE (TYPE_CONTEXT (type)) == TRANSLATION_UNIT_DECL)
680 	  && COMPLETE_TYPE_P (type)
681 	  && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
682 	  && !compare_tree_int (TYPE_SIZE (type),
683 				2 * tree_to_uhwi (TYPE_SIZE (ptr_type_node))));
684 }
685 
686 
687 /* Complete a #pragma omp depobj construct.  LOC is the location of the
688    #pragma.  */
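/* Informally, for

     omp_depend_t d;
     #pragma omp depobj (d) depend (inout: x)

   this emits two stores through D viewed as an array of two pointers:
   the first slot receives &x and the second slot the dependence kind
   (GOMP_DEPEND_INOUT here).  */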
689 
690 void
691 c_finish_omp_depobj (location_t loc, tree depobj,
692 		     enum omp_clause_depend_kind kind, tree clause)
693 {
694   tree t = NULL_TREE;
695   if (!error_operand_p (depobj))
696     {
697       if (!c_omp_depend_t_p (TREE_TYPE (depobj)))
698 	{
699 	  error_at (EXPR_LOC_OR_LOC (depobj, loc),
700 		    "type of %<depobj%> expression is not %<omp_depend_t%>");
701 	  depobj = error_mark_node;
702 	}
703       else if (TYPE_READONLY (TREE_TYPE (depobj)))
704 	{
705 	  error_at (EXPR_LOC_OR_LOC (depobj, loc),
706 		    "%<const%> qualified %<depobj%> expression");
707 	  depobj = error_mark_node;
708 	}
709     }
710   else
711     depobj = error_mark_node;
712 
713   if (clause == error_mark_node)
714     return;
715 
716   if (clause)
717     {
718       gcc_assert (TREE_CODE (clause) == OMP_CLAUSE
719 		  && OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_DEPEND);
720       if (OMP_CLAUSE_CHAIN (clause))
721 	error_at (OMP_CLAUSE_LOCATION (clause),
722 		  "more than one locator in %<depend%> clause on %<depobj%> "
723 		  "construct");
724       switch (OMP_CLAUSE_DEPEND_KIND (clause))
725 	{
726 	case OMP_CLAUSE_DEPEND_DEPOBJ:
727 	  error_at (OMP_CLAUSE_LOCATION (clause),
728 		    "%<depobj%> dependence type specified in %<depend%> "
729 		    "clause on %<depobj%> construct");
730 	  return;
731 	case OMP_CLAUSE_DEPEND_SOURCE:
732 	case OMP_CLAUSE_DEPEND_SINK:
733 	  error_at (OMP_CLAUSE_LOCATION (clause),
734 		    "%<depend(%s)%> is only allowed in %<omp ordered%>",
735 		    OMP_CLAUSE_DEPEND_KIND (clause) == OMP_CLAUSE_DEPEND_SOURCE
736 		    ? "source" : "sink");
737 	  return;
738 	case OMP_CLAUSE_DEPEND_IN:
739 	case OMP_CLAUSE_DEPEND_OUT:
740 	case OMP_CLAUSE_DEPEND_INOUT:
741 	case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
742 	  kind = OMP_CLAUSE_DEPEND_KIND (clause);
743 	  t = OMP_CLAUSE_DECL (clause);
744 	  gcc_assert (t);
745 	  if (TREE_CODE (t) == TREE_LIST
746 	      && TREE_PURPOSE (t)
747 	      && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
748 	    {
749 	      error_at (OMP_CLAUSE_LOCATION (clause),
750 			"%<iterator%> modifier may not be specified on "
751 			"%<depobj%> construct");
752 	      return;
753 	    }
754 	  if (TREE_CODE (t) == COMPOUND_EXPR)
755 	    {
756 	      tree t1 = build_fold_addr_expr (TREE_OPERAND (t, 1));
757 	      t = build2 (COMPOUND_EXPR, TREE_TYPE (t1), TREE_OPERAND (t, 0),
758 			  t1);
759 	    }
760 	  else
761 	    t = build_fold_addr_expr (t);
762 	  break;
763 	default:
764 	  gcc_unreachable ();
765 	}
766     }
767   else
768     gcc_assert (kind != OMP_CLAUSE_DEPEND_SOURCE);
769 
770   if (depobj == error_mark_node)
771     return;
772 
773   depobj = build_fold_addr_expr_loc (EXPR_LOC_OR_LOC (depobj, loc), depobj);
774   tree dtype
775     = build_pointer_type_for_mode (ptr_type_node, TYPE_MODE (ptr_type_node),
776 				   true);
777   depobj = fold_convert (dtype, depobj);
778   tree r;
779   if (clause)
780     {
781       depobj = save_expr (depobj);
782       r = build_indirect_ref (loc, depobj, RO_UNARY_STAR);
783       add_stmt (build2 (MODIFY_EXPR, void_type_node, r, t));
784     }
785   int k;
786   switch (kind)
787     {
788     case OMP_CLAUSE_DEPEND_IN:
789       k = GOMP_DEPEND_IN;
790       break;
791     case OMP_CLAUSE_DEPEND_OUT:
792       k = GOMP_DEPEND_OUT;
793       break;
794     case OMP_CLAUSE_DEPEND_INOUT:
795       k = GOMP_DEPEND_INOUT;
796       break;
797     case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
798       k = GOMP_DEPEND_MUTEXINOUTSET;
799       break;
800     case OMP_CLAUSE_DEPEND_LAST:
801       k = -1;
802       break;
803     default:
804       gcc_unreachable ();
805     }
806   t = build_int_cst (ptr_type_node, k);
807   depobj = build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (depobj), depobj,
808 		       TYPE_SIZE_UNIT (ptr_type_node));
809   r = build_indirect_ref (loc, depobj, RO_UNARY_STAR);
810   add_stmt (build2 (MODIFY_EXPR, void_type_node, r, t));
811 }
812 
813 
814 /* Complete a #pragma omp flush construct.  We don't do anything with
815    the variable list that the syntax allows.  LOC is the location of
816    the #pragma.  */
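/* For example, a plain #pragma omp flush (or one with seq_cst) becomes a
   call to __sync_synchronize (), while the weaker variants such as
   #pragma omp flush acq_rel become __atomic_thread_fence with the
   requested memory order.  */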
817 
818 void
819 c_finish_omp_flush (location_t loc, int mo)
820 {
821   tree x;
822 
823   if (mo == MEMMODEL_LAST || mo == MEMMODEL_SEQ_CST)
824     {
825       x = builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE);
826       x = build_call_expr_loc (loc, x, 0);
827     }
828   else
829     {
830       x = builtin_decl_explicit (BUILT_IN_ATOMIC_THREAD_FENCE);
831       x = build_call_expr_loc (loc, x, 1,
832 			       build_int_cst (integer_type_node, mo));
833     }
834   add_stmt (x);
835 }
836 
837 
838 /* Check and canonicalize OMP_FOR increment expression.
839    Helper function for c_finish_omp_for.  */
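/* Informally: given the right-hand side of "DECL = rhs", return rhs with
   DECL replaced by zero and folded, i.e. the loop step.  With DECL == i,
   an rhs of "i + 3" yields 3 and "n + i" yields n; anything that cannot
   be decomposed this way yields error_mark_node.  */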
840 
841 static tree
842 check_omp_for_incr_expr (location_t loc, tree exp, tree decl)
843 {
844   tree t;
845 
846   if (!INTEGRAL_TYPE_P (TREE_TYPE (exp))
847       || TYPE_PRECISION (TREE_TYPE (exp)) < TYPE_PRECISION (TREE_TYPE (decl)))
848     return error_mark_node;
849 
850   if (exp == decl)
851     return build_int_cst (TREE_TYPE (exp), 0);
852 
853   switch (TREE_CODE (exp))
854     {
855     CASE_CONVERT:
856       t = check_omp_for_incr_expr (loc, TREE_OPERAND (exp, 0), decl);
857       if (t != error_mark_node)
858         return fold_convert_loc (loc, TREE_TYPE (exp), t);
859       break;
860     case MINUS_EXPR:
861       t = check_omp_for_incr_expr (loc, TREE_OPERAND (exp, 0), decl);
862       if (t != error_mark_node)
863         return fold_build2_loc (loc, MINUS_EXPR,
864 				TREE_TYPE (exp), t, TREE_OPERAND (exp, 1));
865       break;
866     case PLUS_EXPR:
867       t = check_omp_for_incr_expr (loc, TREE_OPERAND (exp, 0), decl);
868       if (t != error_mark_node)
869         return fold_build2_loc (loc, PLUS_EXPR,
870 				TREE_TYPE (exp), t, TREE_OPERAND (exp, 1));
871       t = check_omp_for_incr_expr (loc, TREE_OPERAND (exp, 1), decl);
872       if (t != error_mark_node)
873         return fold_build2_loc (loc, PLUS_EXPR,
874 				TREE_TYPE (exp), TREE_OPERAND (exp, 0), t);
875       break;
876     case COMPOUND_EXPR:
877       {
878 	/* cp_build_modify_expr forces preevaluation of the RHS to make
879 	   sure that it is evaluated before the lvalue-rvalue conversion
880 	   is applied to the LHS.  Reconstruct the original expression.  */
881 	tree op0 = TREE_OPERAND (exp, 0);
882 	if (TREE_CODE (op0) == TARGET_EXPR
883 	    && !VOID_TYPE_P (TREE_TYPE (op0)))
884 	  {
885 	    tree op1 = TREE_OPERAND (exp, 1);
886 	    tree temp = TARGET_EXPR_SLOT (op0);
887 	    if (BINARY_CLASS_P (op1)
888 		&& TREE_OPERAND (op1, 1) == temp)
889 	      {
890 		op1 = copy_node (op1);
891 		TREE_OPERAND (op1, 1) = TARGET_EXPR_INITIAL (op0);
892 		return check_omp_for_incr_expr (loc, op1, decl);
893 	      }
894 	  }
895 	break;
896       }
897     default:
898       break;
899     }
900 
901   return error_mark_node;
902 }
903 
904 /* If the OMP_FOR increment expression in INCR is of pointer type,
905    canonicalize it into an expression handled by gimplify_omp_for()
906    and return it.  DECL is the iteration variable.  */
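/* E.g. for "int *p" (assuming a 4-byte int), "p++" arrives here with the
   element size as its second operand and is rewritten into roughly
   "p = p + 4" via POINTER_PLUS_EXPR, while "p--" becomes
   "p = p + (sizetype) -4".  */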
907 
908 static tree
909 c_omp_for_incr_canonicalize_ptr (location_t loc, tree decl, tree incr)
910 {
911   if (POINTER_TYPE_P (TREE_TYPE (decl))
912       && TREE_OPERAND (incr, 1))
913     {
914       tree t = fold_convert_loc (loc,
915 				 sizetype, TREE_OPERAND (incr, 1));
916 
917       if (TREE_CODE (incr) == POSTDECREMENT_EXPR
918 	  || TREE_CODE (incr) == PREDECREMENT_EXPR)
919 	t = fold_build1_loc (loc, NEGATE_EXPR, sizetype, t);
920       t = fold_build_pointer_plus (decl, t);
921       incr = build2 (MODIFY_EXPR, void_type_node, decl, t);
922     }
923   return incr;
924 }
925 
926 /* Validate and generate OMP_FOR.
927    DECLV is a vector of iteration variables, for each collapsed loop.
928 
929    ORIG_DECLV, if non-NULL, is a vector with the original iteration
930    variables (prior to any transformations, by say, C++ iterators).
931 
932    INITV, CONDV and INCRV are vectors containing initialization
933    expressions, controlling predicates and increment expressions.
934    BODY is the body of the loop and PRE_BODY the statements that go before
935    the loop.  */
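/* As a sketch, for

     #pragma omp for collapse (2)
     for (i = 0; i < n; i++)
       for (j = 0; j < m; j++)
	 ...

   DECLV is {i, j}, INITV is {i = 0, j = 0}, CONDV is {i < n, j < m} and
   INCRV holds the two increment expressions; each element is validated
   and canonicalized below and the result is packed into a single node of
   the given CODE (OMP_FOR, OMP_SIMD, OACC_LOOP, ...).  */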
936 
937 tree
938 c_finish_omp_for (location_t locus, enum tree_code code, tree declv,
939 		  tree orig_declv, tree initv, tree condv, tree incrv,
940 		  tree body, tree pre_body, bool final_p)
941 {
942   location_t elocus;
943   bool fail = false;
944   int i;
945 
946   gcc_assert (TREE_VEC_LENGTH (declv) == TREE_VEC_LENGTH (initv));
947   gcc_assert (TREE_VEC_LENGTH (declv) == TREE_VEC_LENGTH (condv));
948   gcc_assert (TREE_VEC_LENGTH (declv) == TREE_VEC_LENGTH (incrv));
949   for (i = 0; i < TREE_VEC_LENGTH (declv); i++)
950     {
951       tree decl = TREE_VEC_ELT (declv, i);
952       tree init = TREE_VEC_ELT (initv, i);
953       tree cond = TREE_VEC_ELT (condv, i);
954       tree incr = TREE_VEC_ELT (incrv, i);
955 
956       elocus = locus;
957       if (EXPR_HAS_LOCATION (init))
958 	elocus = EXPR_LOCATION (init);
959 
960       /* Validate the iteration variable.  */
961       if (!INTEGRAL_TYPE_P (TREE_TYPE (decl))
962 	  && TREE_CODE (TREE_TYPE (decl)) != POINTER_TYPE)
963 	{
964 	  error_at (elocus, "invalid type for iteration variable %qE", decl);
965 	  fail = true;
966 	}
967       else if (TYPE_ATOMIC (TREE_TYPE (decl)))
968 	{
969 	  error_at (elocus, "%<_Atomic%> iteration variable %qE", decl);
970 	  fail = true;
971 	  /* _Atomic iterator confuses stuff too much, so we risk ICE
972 	     trying to diagnose it further.  */
973 	  continue;
974 	}
975 
976       /* In the case of "for (int i = 0...)", init will be a decl.  It should
977 	 have a DECL_INITIAL that we can turn into an assignment.  */
978       if (init == decl)
979 	{
980 	  elocus = DECL_SOURCE_LOCATION (decl);
981 
982 	  init = DECL_INITIAL (decl);
983 	  if (init == NULL)
984 	    {
985 	      error_at (elocus, "%qE is not initialized", decl);
986 	      init = integer_zero_node;
987 	      fail = true;
988 	    }
989 	  DECL_INITIAL (decl) = NULL_TREE;
990 
991 	  init = build_modify_expr (elocus, decl, NULL_TREE, NOP_EXPR,
992 	      			    /* FIXME diagnostics: This should
993 				       be the location of the INIT.  */
994 	      			    elocus,
995 				    init,
996 				    NULL_TREE);
997 	}
998       if (init != error_mark_node)
999 	{
1000 	  gcc_assert (TREE_CODE (init) == MODIFY_EXPR);
1001 	  gcc_assert (TREE_OPERAND (init, 0) == decl);
1002 	}
1003 
1004       if (cond == NULL_TREE)
1005 	{
1006 	  error_at (elocus, "missing controlling predicate");
1007 	  fail = true;
1008 	}
1009       else
1010 	{
1011 	  bool cond_ok = false;
1012 
1013 	  /* E.g. C sizeof (vla) could add COMPOUND_EXPRs with
1014 	     evaluation of the vla VAR_DECL.  We need to re-add
1015 	     them to the non-decl operand.  See PR45784.  */
1016 	  while (TREE_CODE (cond) == COMPOUND_EXPR)
1017 	    cond = TREE_OPERAND (cond, 1);
1018 
1019 	  if (EXPR_HAS_LOCATION (cond))
1020 	    elocus = EXPR_LOCATION (cond);
1021 
1022 	  if (TREE_CODE (cond) == LT_EXPR
1023 	      || TREE_CODE (cond) == LE_EXPR
1024 	      || TREE_CODE (cond) == GT_EXPR
1025 	      || TREE_CODE (cond) == GE_EXPR
1026 	      || TREE_CODE (cond) == NE_EXPR
1027 	      || TREE_CODE (cond) == EQ_EXPR)
1028 	    {
1029 	      tree op0 = TREE_OPERAND (cond, 0);
1030 	      tree op1 = TREE_OPERAND (cond, 1);
1031 
1032 	      /* 2.5.1.  The comparison in the condition is computed in
1033 		 the type of DECL, otherwise the behavior is undefined.
1034 
1035 		 For example:
1036 		 long n; int i;
1037 		 i < n;
1038 
1039 		 according to ISO will be evaluated as:
1040 		 (long)i < n;
1041 
1042 		 We want to force:
1043 		 i < (int)n;  */
1044 	      if (TREE_CODE (op0) == NOP_EXPR
1045 		  && decl == TREE_OPERAND (op0, 0))
1046 		{
1047 		  TREE_OPERAND (cond, 0) = TREE_OPERAND (op0, 0);
1048 		  TREE_OPERAND (cond, 1)
1049 		    = fold_build1_loc (elocus, NOP_EXPR, TREE_TYPE (decl),
1050 				   TREE_OPERAND (cond, 1));
1051 		}
1052 	      else if (TREE_CODE (op1) == NOP_EXPR
1053 		       && decl == TREE_OPERAND (op1, 0))
1054 		{
1055 		  TREE_OPERAND (cond, 1) = TREE_OPERAND (op1, 0);
1056 		  TREE_OPERAND (cond, 0)
1057 		    = fold_build1_loc (elocus, NOP_EXPR, TREE_TYPE (decl),
1058 				   TREE_OPERAND (cond, 0));
1059 		}
1060 
1061 	      if (decl == TREE_OPERAND (cond, 0))
1062 		cond_ok = true;
1063 	      else if (decl == TREE_OPERAND (cond, 1))
1064 		{
1065 		  TREE_SET_CODE (cond,
1066 				 swap_tree_comparison (TREE_CODE (cond)));
1067 		  TREE_OPERAND (cond, 1) = TREE_OPERAND (cond, 0);
1068 		  TREE_OPERAND (cond, 0) = decl;
1069 		  cond_ok = true;
1070 		}
1071 
1072 	      if (TREE_CODE (cond) == NE_EXPR
1073 		  || TREE_CODE (cond) == EQ_EXPR)
1074 		{
1075 		  if (!INTEGRAL_TYPE_P (TREE_TYPE (decl)))
1076 		    {
1077 		      if (code == OACC_LOOP || TREE_CODE (cond) == EQ_EXPR)
1078 			cond_ok = false;
1079 		    }
1080 		  else if (operand_equal_p (TREE_OPERAND (cond, 1),
1081 					    TYPE_MIN_VALUE (TREE_TYPE (decl)),
1082 					    0))
1083 		    TREE_SET_CODE (cond, TREE_CODE (cond) == NE_EXPR
1084 					 ? GT_EXPR : LE_EXPR);
1085 		  else if (operand_equal_p (TREE_OPERAND (cond, 1),
1086 					    TYPE_MAX_VALUE (TREE_TYPE (decl)),
1087 					    0))
1088 		    TREE_SET_CODE (cond, TREE_CODE (cond) == NE_EXPR
1089 					 ? LT_EXPR : GE_EXPR);
1090 		  else if (code == OACC_LOOP || TREE_CODE (cond) == EQ_EXPR)
1091 		    cond_ok = false;
1092 		}
1093 
1094 	      if (cond_ok && TREE_VEC_ELT (condv, i) != cond)
1095 		{
1096 		  tree ce = NULL_TREE, *pce = &ce;
1097 		  tree type = TREE_TYPE (TREE_OPERAND (cond, 1));
1098 		  for (tree c = TREE_VEC_ELT (condv, i); c != cond;
1099 		       c = TREE_OPERAND (c, 1))
1100 		    {
1101 		      *pce = build2 (COMPOUND_EXPR, type, TREE_OPERAND (c, 0),
1102 				     TREE_OPERAND (cond, 1));
1103 		      pce = &TREE_OPERAND (*pce, 1);
1104 		    }
1105 		  TREE_OPERAND (cond, 1) = ce;
1106 		  TREE_VEC_ELT (condv, i) = cond;
1107 		}
1108 	    }
1109 
1110 	  if (!cond_ok)
1111 	    {
1112 	      error_at (elocus, "invalid controlling predicate");
1113 	      fail = true;
1114 	    }
1115 	}
1116 
1117       if (incr == NULL_TREE)
1118 	{
1119 	  error_at (elocus, "missing increment expression");
1120 	  fail = true;
1121 	}
1122       else
1123 	{
1124 	  bool incr_ok = false;
1125 
1126 	  if (EXPR_HAS_LOCATION (incr))
1127 	    elocus = EXPR_LOCATION (incr);
1128 
1129 	  /* Check all the valid increment expressions: v++, v--, ++v, --v,
1130 	     v = v + incr, v = incr + v and v = v - incr.  */
1131 	  switch (TREE_CODE (incr))
1132 	    {
1133 	    case POSTINCREMENT_EXPR:
1134 	    case PREINCREMENT_EXPR:
1135 	    case POSTDECREMENT_EXPR:
1136 	    case PREDECREMENT_EXPR:
1137 	      if (TREE_OPERAND (incr, 0) != decl)
1138 		break;
1139 
1140 	      incr_ok = true;
1141 	      if (!fail
1142 		  && TREE_CODE (cond) == NE_EXPR
1143 		  && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
1144 		  && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
1145 		  && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
1146 		      != INTEGER_CST))
1147 		{
1148 		  /* For pointer to VLA, transform != into < or >
1149 		     depending on whether incr is increment or decrement.  */
1150 		  if (TREE_CODE (incr) == PREINCREMENT_EXPR
1151 		      || TREE_CODE (incr) == POSTINCREMENT_EXPR)
1152 		    TREE_SET_CODE (cond, LT_EXPR);
1153 		  else
1154 		    TREE_SET_CODE (cond, GT_EXPR);
1155 		}
1156 	      incr = c_omp_for_incr_canonicalize_ptr (elocus, decl, incr);
1157 	      break;
1158 
1159 	    case COMPOUND_EXPR:
1160 	      if (TREE_CODE (TREE_OPERAND (incr, 0)) != SAVE_EXPR
1161 		  || TREE_CODE (TREE_OPERAND (incr, 1)) != MODIFY_EXPR)
1162 		break;
1163 	      incr = TREE_OPERAND (incr, 1);
1164 	      /* FALLTHRU */
1165 	    case MODIFY_EXPR:
1166 	      if (TREE_OPERAND (incr, 0) != decl)
1167 		break;
1168 	      if (TREE_OPERAND (incr, 1) == decl)
1169 		break;
1170 	      if (TREE_CODE (TREE_OPERAND (incr, 1)) == PLUS_EXPR
1171 		  && (TREE_OPERAND (TREE_OPERAND (incr, 1), 0) == decl
1172 		      || TREE_OPERAND (TREE_OPERAND (incr, 1), 1) == decl))
1173 		incr_ok = true;
1174 	      else if ((TREE_CODE (TREE_OPERAND (incr, 1)) == MINUS_EXPR
1175 			|| (TREE_CODE (TREE_OPERAND (incr, 1))
1176 			    == POINTER_PLUS_EXPR))
1177 		       && TREE_OPERAND (TREE_OPERAND (incr, 1), 0) == decl)
1178 		incr_ok = true;
1179 	      else
1180 		{
1181 		  tree t = check_omp_for_incr_expr (elocus,
1182 						    TREE_OPERAND (incr, 1),
1183 						    decl);
1184 		  if (t != error_mark_node)
1185 		    {
1186 		      incr_ok = true;
1187 		      t = build2 (PLUS_EXPR, TREE_TYPE (decl), decl, t);
1188 		      incr = build2 (MODIFY_EXPR, void_type_node, decl, t);
1189 		    }
1190 		}
1191 	      if (!fail
1192 		  && incr_ok
1193 		  && TREE_CODE (cond) == NE_EXPR)
1194 		{
1195 		  tree i = TREE_OPERAND (incr, 1);
1196 		  i = TREE_OPERAND (i, TREE_OPERAND (i, 0) == decl);
1197 		  i = c_fully_fold (i, false, NULL);
1198 		  if (!final_p
1199 		      && TREE_CODE (i) != INTEGER_CST)
1200 		    ;
1201 		  else if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
1202 		    {
1203 		      tree unit
1204 			= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
1205 		      if (unit)
1206 			{
1207 			  enum tree_code ccode = GT_EXPR;
1208 			  unit = c_fully_fold (unit, false, NULL);
1209 			  i = fold_convert (TREE_TYPE (unit), i);
1210 			  if (operand_equal_p (unit, i, 0))
1211 			    ccode = LT_EXPR;
1212 			  if (ccode == GT_EXPR)
1213 			    {
1214 			      i = fold_unary (NEGATE_EXPR, TREE_TYPE (i), i);
1215 			      if (i == NULL_TREE
1216 				  || !operand_equal_p (unit, i, 0))
1217 				{
1218 				  error_at (elocus,
1219 					    "increment is not constant 1 or "
1220 					    "-1 for %<!=%> condition");
1221 				  fail = true;
1222 				}
1223 			    }
1224 			  if (TREE_CODE (unit) != INTEGER_CST)
1225 			    /* For pointer to VLA, transform != into < or >
1226 			       depending on whether the pointer is
1227 			       incremented or decremented in each
1228 			       iteration.  */
1229 			    TREE_SET_CODE (cond, ccode);
1230 			}
1231 		    }
1232 		  else
1233 		    {
1234 		      if (!integer_onep (i) && !integer_minus_onep (i))
1235 			{
1236 			  error_at (elocus,
1237 				    "increment is not constant 1 or -1 for"
1238 				    " %<!=%> condition");
1239 			  fail = true;
1240 			}
1241 		    }
1242 		}
1243 	      break;
1244 
1245 	    default:
1246 	      break;
1247 	    }
1248 	  if (!incr_ok)
1249 	    {
1250 	      error_at (elocus, "invalid increment expression");
1251 	      fail = true;
1252 	    }
1253 	}
1254 
1255       TREE_VEC_ELT (initv, i) = init;
1256       TREE_VEC_ELT (incrv, i) = incr;
1257     }
1258 
1259   if (fail)
1260     return NULL;
1261   else
1262     {
1263       tree t = make_node (code);
1264 
1265       TREE_TYPE (t) = void_type_node;
1266       OMP_FOR_INIT (t) = initv;
1267       OMP_FOR_COND (t) = condv;
1268       OMP_FOR_INCR (t) = incrv;
1269       OMP_FOR_BODY (t) = body;
1270       OMP_FOR_PRE_BODY (t) = pre_body;
1271       OMP_FOR_ORIG_DECLS (t) = orig_declv;
1272 
1273       SET_EXPR_LOCATION (t, locus);
1274       return t;
1275     }
1276 }
1277 
1278 /* Type for passing data in between c_omp_check_loop_iv and
1279    c_omp_check_loop_iv_r.  */
1280 
1281 struct c_omp_check_loop_iv_data
1282 {
1283   tree declv;
1284   bool fail;
1285   bool maybe_nonrect;
1286   location_t stmt_loc;
1287   location_t expr_loc;
1288   int kind;
1289   int idx;
1290   walk_tree_lh lh;
1291   hash_set<tree> *ppset;
1292 };
1293 
1294 /* Return -1 if DECL is not a loop iterator in loop nest D, otherwise
1295    return the index of the loop in which it is an iterator.
1296    Return TREE_VEC_LENGTH (d->declv) if it is a C++ range for iterator.  */
1297 
1298 static int
1299 c_omp_is_loop_iterator (tree decl, struct c_omp_check_loop_iv_data *d)
1300 {
1301   for (int i = 0; i < TREE_VEC_LENGTH (d->declv); i++)
1302     if (decl == TREE_VEC_ELT (d->declv, i)
1303 	|| (TREE_CODE (TREE_VEC_ELT (d->declv, i)) == TREE_LIST
1304 	    && decl == TREE_PURPOSE (TREE_VEC_ELT (d->declv, i))))
1305       return i;
1306     else if (TREE_CODE (TREE_VEC_ELT (d->declv, i)) == TREE_LIST
1307 	     && TREE_CHAIN (TREE_VEC_ELT (d->declv, i))
1308 	     && (TREE_CODE (TREE_CHAIN (TREE_VEC_ELT (d->declv, i)))
1309 		 == TREE_VEC)
1310 	     && decl == TREE_VEC_ELT (TREE_CHAIN (TREE_VEC_ELT (d->declv,
1311 						  i)), 2))
1312       return TREE_VEC_LENGTH (d->declv);
1313   return -1;
1314 }
1315 
1316 /* Helper function called via walk_tree, to diagnose uses
1317    of associated loop IVs inside of lb, b and incr expressions
1318    of OpenMP loops.  */
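/* E.g. for

     #pragma acc loop collapse (2)
     for (i = 0; i < n; i++)
       for (j = i; j < n; j++)

   the reference to I in the inner initializer is diagnosed here; for
   OpenMP loops a reference to an outer iteration variable is instead
   flagged as possibly non-rectangular via d->maybe_nonrect and left to
   the caller.  */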
1319 
1320 static tree
1321 c_omp_check_loop_iv_r (tree *tp, int *walk_subtrees, void *data)
1322 {
1323   struct c_omp_check_loop_iv_data *d
1324     = (struct c_omp_check_loop_iv_data *) data;
1325   if (DECL_P (*tp))
1326     {
1327       int idx = c_omp_is_loop_iterator (*tp, d);
1328       if (idx == -1)
1329 	return NULL_TREE;
1330 
1331       if ((d->kind & 4) && idx < d->idx)
1332 	{
1333 	  d->maybe_nonrect = true;
1334 	  return NULL_TREE;
1335 	}
1336 
1337       if (d->ppset->add (*tp))
1338 	return NULL_TREE;
1339 
1340       location_t loc = d->expr_loc;
1341       if (loc == UNKNOWN_LOCATION)
1342 	loc = d->stmt_loc;
1343 
1344       switch (d->kind & 3)
1345 	{
1346 	case 0:
1347 	  error_at (loc, "initializer expression refers to "
1348 			 "iteration variable %qD", *tp);
1349 	  break;
1350 	case 1:
1351 	  error_at (loc, "condition expression refers to "
1352 			 "iteration variable %qD", *tp);
1353 	  break;
1354 	case 2:
1355 	  error_at (loc, "increment expression refers to "
1356 			 "iteration variable %qD", *tp);
1357 	  break;
1358 	}
1359       d->fail = true;
1360     }
1361   else if ((d->kind & 4)
1362 	   && TREE_CODE (*tp) != TREE_VEC
1363 	   && TREE_CODE (*tp) != PLUS_EXPR
1364 	   && TREE_CODE (*tp) != MINUS_EXPR
1365 	   && TREE_CODE (*tp) != MULT_EXPR
1366 	   && TREE_CODE (*tp) != POINTER_PLUS_EXPR
1367 	   && !CONVERT_EXPR_P (*tp))
1368     {
1369       *walk_subtrees = 0;
1370       d->kind &= 3;
1371       walk_tree_1 (tp, c_omp_check_loop_iv_r, data, NULL, d->lh);
1372       d->kind |= 4;
1373       return NULL_TREE;
1374     }
1375   else if (d->ppset->add (*tp))
1376     *walk_subtrees = 0;
1377   /* Don't walk dtors added by C++ wrap_cleanups_r.  */
1378   else if (TREE_CODE (*tp) == TRY_CATCH_EXPR
1379 	   && TRY_CATCH_IS_CLEANUP (*tp))
1380     {
1381       *walk_subtrees = 0;
1382       return walk_tree_1 (&TREE_OPERAND (*tp, 0), c_omp_check_loop_iv_r, data,
1383 			  NULL, d->lh);
1384     }
1385 
1386   return NULL_TREE;
1387 }
1388 
1389 /* Check the allowed expressions for non-rectangular loop nest lb and b
1390    expressions.  Return the outer var decl referenced in the expression.  */
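/* E.g. in the non-rectangular nest

     for (i = 0; i < n; i++)
       for (j = 2 * i + 1; j < 3 * i; j++)

   the inner lower bound "2 * i + 1" is decomposed into outer variable i,
   multiplier 2 and addend 1 and replaced by a TREE_VEC {i, 2, 1}; the
   returned decl is i, which lets the caller verify that lb and b refer
   to the same outer iterator.  */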
1391 
1392 static tree
1393 c_omp_check_nonrect_loop_iv (tree *tp, struct c_omp_check_loop_iv_data *d,
1394 			     walk_tree_lh lh)
1395 {
1396   d->maybe_nonrect = false;
1397   if (d->fail)
1398     return NULL_TREE;
1399 
1400   hash_set<tree> pset;
1401   hash_set<tree> *ppset = d->ppset;
1402   d->ppset = &pset;
1403 
1404   tree t = *tp;
1405   if (TREE_CODE (t) == TREE_VEC
1406       && TREE_VEC_LENGTH (t) == 3
1407       && DECL_P (TREE_VEC_ELT (t, 0))
1408       && c_omp_is_loop_iterator (TREE_VEC_ELT (t, 0), d) >= 0)
1409     {
1410       d->kind &= 3;
1411       walk_tree_1 (&TREE_VEC_ELT (t, 1), c_omp_check_loop_iv_r, d, NULL, lh);
1412       walk_tree_1 (&TREE_VEC_ELT (t, 2), c_omp_check_loop_iv_r, d, NULL, lh);
1413       d->ppset = ppset;
1414       return d->fail ? NULL_TREE : TREE_VEC_ELT (t, 0);
1415     }
1416 
1417   while (CONVERT_EXPR_P (t))
1418     t = TREE_OPERAND (t, 0);
1419 
1420   tree a1 = t, a2 = integer_zero_node;
1421   bool neg_a1 = false, neg_a2 = false;
1422   switch (TREE_CODE (t))
1423     {
1424     case PLUS_EXPR:
1425     case MINUS_EXPR:
1426       a1 = TREE_OPERAND (t, 0);
1427       a2 = TREE_OPERAND (t, 1);
1428       while (CONVERT_EXPR_P (a1))
1429 	a1 = TREE_OPERAND (a1, 0);
1430       while (CONVERT_EXPR_P (a2))
1431 	a2 = TREE_OPERAND (a2, 0);
1432       if (DECL_P (a1) && c_omp_is_loop_iterator (a1, d) >= 0)
1433 	{
1434 	  a2 = TREE_OPERAND (t, 1);
1435 	  if (TREE_CODE (t) == MINUS_EXPR)
1436 	    neg_a2 = true;
1437 	  t = a1;
1438 	  break;
1439 	}
1440       if (DECL_P (a2) && c_omp_is_loop_iterator (a2, d) >= 0)
1441 	{
1442 	  a1 = TREE_OPERAND (t, 0);
1443 	  if (TREE_CODE (t) == MINUS_EXPR)
1444 	    neg_a1 = true;
1445 	  t = a2;
1446 	  a2 = a1;
1447 	  break;
1448 	}
1449       if (TREE_CODE (a1) == MULT_EXPR && TREE_CODE (a2) == MULT_EXPR)
1450 	{
1451 	  tree o1 = TREE_OPERAND (a1, 0);
1452 	  tree o2 = TREE_OPERAND (a1, 1);
1453 	  while (CONVERT_EXPR_P (o1))
1454 	    o1 = TREE_OPERAND (o1, 0);
1455 	  while (CONVERT_EXPR_P (o2))
1456 	    o2 = TREE_OPERAND (o2, 0);
1457 	  if ((DECL_P (o1) && c_omp_is_loop_iterator (o1, d) >= 0)
1458 	      || (DECL_P (o2) && c_omp_is_loop_iterator (o2, d) >= 0))
1459 	    {
1460 	      a2 = TREE_OPERAND (t, 1);
1461 	      if (TREE_CODE (t) == MINUS_EXPR)
1462 		neg_a2 = true;
1463 	      t = a1;
1464 	      break;
1465 	    }
1466 	}
1467       if (TREE_CODE (a2) == MULT_EXPR)
1468 	{
1469 	  a1 = TREE_OPERAND (t, 0);
1470 	  if (TREE_CODE (t) == MINUS_EXPR)
1471 	    neg_a1 = true;
1472 	  t = a2;
1473 	  a2 = a1;
1474 	  break;
1475 	}
1476       if (TREE_CODE (a1) == MULT_EXPR)
1477 	{
1478 	  a2 = TREE_OPERAND (t, 1);
1479 	  if (TREE_CODE (t) == MINUS_EXPR)
1480 	    neg_a2 = true;
1481 	  t = a1;
1482 	  break;
1483 	}
1484       a2 = integer_zero_node;
1485       break;
1486     case POINTER_PLUS_EXPR:
1487       a1 = TREE_OPERAND (t, 0);
1488       a2 = TREE_OPERAND (t, 1);
1489       while (CONVERT_EXPR_P (a1))
1490 	a1 = TREE_OPERAND (a1, 0);
1491       if (DECL_P (a1) && c_omp_is_loop_iterator (a1, d) >= 0)
1492 	{
1493 	  a2 = TREE_OPERAND (t, 1);
1494 	  t = a1;
1495 	  break;
1496 	}
1497       break;
1498     default:
1499       break;
1500     }
1501 
1502   a1 = integer_one_node;
1503   if (TREE_CODE (t) == MULT_EXPR)
1504     {
1505       tree o1 = TREE_OPERAND (t, 0);
1506       tree o2 = TREE_OPERAND (t, 1);
1507       while (CONVERT_EXPR_P (o1))
1508 	o1 = TREE_OPERAND (o1, 0);
1509       while (CONVERT_EXPR_P (o2))
1510 	o2 = TREE_OPERAND (o2, 0);
1511       if (DECL_P (o1) && c_omp_is_loop_iterator (o1, d) >= 0)
1512 	{
1513 	  a1 = TREE_OPERAND (t, 1);
1514 	  t = o1;
1515 	}
1516       else if (DECL_P (o2) && c_omp_is_loop_iterator (o2, d) >= 0)
1517 	{
1518 	  a1 = TREE_OPERAND (t, 0);
1519 	  t = o2;
1520 	}
1521     }
1522 
1523   d->kind &= 3;
1524   tree ret = NULL_TREE;
1525   if (DECL_P (t) && c_omp_is_loop_iterator (t, d) >= 0)
1526     {
1527       location_t loc = d->expr_loc;
1528       if (loc == UNKNOWN_LOCATION)
1529 	loc = d->stmt_loc;
1530       if (!lang_hooks.types_compatible_p (TREE_TYPE (*tp), TREE_TYPE (t)))
1531 	{
1532 	  if (d->kind == 0)
1533 	    error_at (loc, "outer iteration variable %qD used in initializer"
1534 			   " expression has type other than %qT",
1535 		      t, TREE_TYPE (*tp));
1536 	  else
1537 	    error_at (loc, "outer iteration variable %qD used in condition"
1538 			   " expression has type other than %qT",
1539 		      t, TREE_TYPE (*tp));
1540 	  d->fail = true;
1541 	}
1542       else if (!INTEGRAL_TYPE_P (TREE_TYPE (a1)))
1543 	{
1544 	  error_at (loc, "outer iteration variable %qD multiplier expression"
1545 			 " %qE is not integral", t, a1);
1546 	  d->fail = true;
1547 	}
1548       else if (!INTEGRAL_TYPE_P (TREE_TYPE (a2)))
1549 	{
1550 	  error_at (loc, "outer iteration variable %qD addend expression"
1551 			 " %qE is not integral", t, a2);
1552 	  d->fail = true;
1553 	}
1554       else
1555 	{
1556 	  walk_tree_1 (&a1, c_omp_check_loop_iv_r, d, NULL, lh);
1557 	  walk_tree_1 (&a2, c_omp_check_loop_iv_r, d, NULL, lh);
1558         }
1559       if (!d->fail)
1560 	{
1561 	  a1 = fold_convert (TREE_TYPE (*tp), a1);
1562 	  a2 = fold_convert (TREE_TYPE (*tp), a2);
1563 	  if (neg_a1)
1564 	    a1 = fold_build1 (NEGATE_EXPR, TREE_TYPE (a1), a1);
1565 	  if (neg_a2)
1566 	    a2 = fold_build1 (NEGATE_EXPR, TREE_TYPE (a2), a2);
1567 	  ret = t;
1568 	  *tp = make_tree_vec (3);
1569 	  TREE_VEC_ELT (*tp, 0) = t;
1570 	  TREE_VEC_ELT (*tp, 1) = a1;
1571 	  TREE_VEC_ELT (*tp, 2) = a2;
1572 	}
1573     }
1574   else
1575     walk_tree_1 (&t, c_omp_check_loop_iv_r, d, NULL, lh);
1576 
1577   d->ppset = ppset;
1578   return ret;
1579 }
1580 
1581 /* Diagnose invalid references to loop iterators in lb, b and incr
1582    expressions.  */
1583 
1584 bool
1585 c_omp_check_loop_iv (tree stmt, tree declv, walk_tree_lh lh)
1586 {
1587   hash_set<tree> pset;
1588   struct c_omp_check_loop_iv_data data;
1589   int i;
1590 
1591   data.declv = declv;
1592   data.fail = false;
1593   data.maybe_nonrect = false;
1594   data.stmt_loc = EXPR_LOCATION (stmt);
1595   data.lh = lh;
1596   data.ppset = &pset;
1597   for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (stmt)); i++)
1598     {
1599       tree init = TREE_VEC_ELT (OMP_FOR_INIT (stmt), i);
1600       gcc_assert (TREE_CODE (init) == MODIFY_EXPR);
1601       tree decl = TREE_OPERAND (init, 0);
1602       tree cond = TREE_VEC_ELT (OMP_FOR_COND (stmt), i);
1603       gcc_assert (COMPARISON_CLASS_P (cond));
1604       gcc_assert (TREE_OPERAND (cond, 0) == decl);
1605       tree incr = TREE_VEC_ELT (OMP_FOR_INCR (stmt), i);
1606       data.expr_loc = EXPR_LOCATION (TREE_OPERAND (init, 1));
1607       tree vec_outer1 = NULL_TREE, vec_outer2 = NULL_TREE;
1608       int kind = 0;
1609       if (i > 0
1610 	  && (unsigned) c_omp_is_loop_iterator (decl, &data) < (unsigned) i)
1611 	{
1612 	  location_t loc = data.expr_loc;
1613 	  if (loc == UNKNOWN_LOCATION)
1614 	    loc = data.stmt_loc;
1615 	  error_at (loc, "the same loop iteration variables %qD used in "
1616 			 "multiple associated loops", decl);
1617 	  data.fail = true;
1618 	}
1619       /* Handle non-rectangular loop nests.  */
1620       if (TREE_CODE (stmt) != OACC_LOOP && i > 0)
1621 	kind = 4;
1622       data.kind = kind;
1623       data.idx = i;
1624       walk_tree_1 (&TREE_OPERAND (init, 1),
1625 		   c_omp_check_loop_iv_r, &data, NULL, lh);
1626       if (data.maybe_nonrect)
1627 	vec_outer1 = c_omp_check_nonrect_loop_iv (&TREE_OPERAND (init, 1),
1628 						  &data, lh);
1629       /* Don't warn for C++ random access iterators here, the
1630 	 expression then involves the subtraction and always refers
1631 	 to the original value.  The C++ FE needs to warn on those
1632 	 earlier.  */
1633       if (decl == TREE_VEC_ELT (declv, i)
1634 	  || (TREE_CODE (TREE_VEC_ELT (declv, i)) == TREE_LIST
1635 	      && decl == TREE_PURPOSE (TREE_VEC_ELT (declv, i))))
1636 	{
1637 	  data.expr_loc = EXPR_LOCATION (cond);
1638 	  data.kind = kind | 1;
1639 	  walk_tree_1 (&TREE_OPERAND (cond, 1),
1640 		       c_omp_check_loop_iv_r, &data, NULL, lh);
1641 	  if (data.maybe_nonrect)
1642 	    vec_outer2 = c_omp_check_nonrect_loop_iv (&TREE_OPERAND (cond, 1),
1643 						      &data, lh);
1644 	}
1645       if (vec_outer1 && vec_outer2 && vec_outer1 != vec_outer2)
1646 	{
1647 	  location_t loc = data.expr_loc;
1648 	  if (loc == UNKNOWN_LOCATION)
1649 	    loc = data.stmt_loc;
1650 	  error_at (loc, "two different outer iteration variables %qD and %qD"
1651 			 " used in a single loop", vec_outer1, vec_outer2);
1652 	  data.fail = true;
1653 	}
1654       if (vec_outer1 || vec_outer2)
1655 	OMP_FOR_NON_RECTANGULAR (stmt) = 1;
1656       if (TREE_CODE (incr) == MODIFY_EXPR)
1657 	{
1658 	  gcc_assert (TREE_OPERAND (incr, 0) == decl);
1659 	  incr = TREE_OPERAND (incr, 1);
1660 	  data.kind = 2;
1661 	  if (TREE_CODE (incr) == PLUS_EXPR
1662 	      && TREE_OPERAND (incr, 1) == decl)
1663 	    {
1664 	      data.expr_loc = EXPR_LOCATION (TREE_OPERAND (incr, 0));
1665 	      walk_tree_1 (&TREE_OPERAND (incr, 0),
1666 			   c_omp_check_loop_iv_r, &data, NULL, lh);
1667 	    }
1668 	  else
1669 	    {
1670 	      data.expr_loc = EXPR_LOCATION (TREE_OPERAND (incr, 1));
1671 	      walk_tree_1 (&TREE_OPERAND (incr, 1),
1672 			   c_omp_check_loop_iv_r, &data, NULL, lh);
1673 	    }
1674 	}
1675     }
1676   return !data.fail;
1677 }
1678 
1679 /* Similar, but allows checking the init or cond expressions individually.  */
1680 
1681 bool
1682 c_omp_check_loop_iv_exprs (location_t stmt_loc, enum tree_code code,
1683 			   tree declv, int i, tree decl, tree init, tree cond,
1684 			   walk_tree_lh lh)
1685 {
1686   hash_set<tree> pset;
1687   struct c_omp_check_loop_iv_data data;
1688   int kind = (code != OACC_LOOP && i > 0) ? 4 : 0;
1689 
1690   data.declv = declv;
1691   data.fail = false;
1692   data.maybe_nonrect = false;
1693   data.stmt_loc = stmt_loc;
1694   data.lh = lh;
1695   data.ppset = &pset;
1696   data.idx = i;
1697   if (i > 0
1698       && (unsigned) c_omp_is_loop_iterator (decl, &data) < (unsigned) i)
1699     {
1700       error_at (stmt_loc, "the same loop iteration variable %qD used in "
1701       			  "multiple associated loops", decl);
1702       data.fail = true;
1703     }
1704   if (init)
1705     {
1706       data.expr_loc = EXPR_LOCATION (init);
1707       data.kind = kind;
1708       walk_tree_1 (&init,
1709 		   c_omp_check_loop_iv_r, &data, NULL, lh);
1710     }
1711   if (cond)
1712     {
1713       gcc_assert (COMPARISON_CLASS_P (cond));
1714       data.expr_loc = EXPR_LOCATION (init);
1715       data.kind = kind | 1;
1716       if (TREE_OPERAND (cond, 0) == decl)
1717 	walk_tree_1 (&TREE_OPERAND (cond, 1),
1718 		     c_omp_check_loop_iv_r, &data, NULL, lh);
1719       else
1720 	walk_tree_1 (&TREE_OPERAND (cond, 0),
1721 		     c_omp_check_loop_iv_r, &data, NULL, lh);
1722     }
1723   return !data.fail;
1724 }
1725 
1726 /* This function splits clauses for OpenACC combined loop
1727    constructs.  OpenACC combined loop constructs are:
1728    #pragma acc kernels loop
1729    #pragma acc parallel loop  */
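
/* As an illustration (a sketch, not an exhaustive list): for

     #pragma acc parallel loop collapse(2) reduction(+:sum) num_gangs(8)

   collapse, reduction and any private clauses stay on the loop
   construct, the reduction is additionally duplicated onto the
   enclosing parallel (the IS_PARALLEL case), and num_gangs, like every
   other parallel/kernels clause, ends up in *NOT_LOOP_CLAUSES.  */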
1730 
1731 tree
1732 c_oacc_split_loop_clauses (tree clauses, tree *not_loop_clauses,
1733 			   bool is_parallel)
1734 {
1735   tree next, loop_clauses, nc;
1736 
1737   loop_clauses = *not_loop_clauses = NULL_TREE;
1738   for (; clauses ; clauses = next)
1739     {
1740       next = OMP_CLAUSE_CHAIN (clauses);
1741 
1742       switch (OMP_CLAUSE_CODE (clauses))
1743         {
1744 	  /* Loop clauses.  */
1745 	case OMP_CLAUSE_COLLAPSE:
1746 	case OMP_CLAUSE_TILE:
1747 	case OMP_CLAUSE_GANG:
1748 	case OMP_CLAUSE_WORKER:
1749 	case OMP_CLAUSE_VECTOR:
1750 	case OMP_CLAUSE_AUTO:
1751 	case OMP_CLAUSE_SEQ:
1752 	case OMP_CLAUSE_INDEPENDENT:
1753 	case OMP_CLAUSE_PRIVATE:
1754 	  OMP_CLAUSE_CHAIN (clauses) = loop_clauses;
1755 	  loop_clauses = clauses;
1756 	  break;
1757 
1758 	  /* Reductions must be duplicated on both constructs.  */
1759 	case OMP_CLAUSE_REDUCTION:
1760 	  if (is_parallel)
1761 	    {
1762 	      nc = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
1763 				     OMP_CLAUSE_REDUCTION);
1764 	      OMP_CLAUSE_DECL (nc) = OMP_CLAUSE_DECL (clauses);
1765 	      OMP_CLAUSE_REDUCTION_CODE (nc)
1766 		= OMP_CLAUSE_REDUCTION_CODE (clauses);
1767 	      OMP_CLAUSE_CHAIN (nc) = *not_loop_clauses;
1768 	      *not_loop_clauses = nc;
1769 	    }
1770 
1771 	  OMP_CLAUSE_CHAIN (clauses) = loop_clauses;
1772 	  loop_clauses = clauses;
1773 	  break;
1774 
1775 	  /* Parallel/kernels clauses.  */
1776 	default:
1777 	  OMP_CLAUSE_CHAIN (clauses) = *not_loop_clauses;
1778 	  *not_loop_clauses = clauses;
1779 	  break;
1780 	}
1781     }
1782 
1783   return loop_clauses;
1784 }
1785 
1786 /* This function attempts to split or duplicate clauses for OpenMP
1787    combined/composite constructs.  Right now there are 35 different
1788    constructs.  CODE is the innermost construct in the combined construct,
1789    and MASK allows determining which constructs are combined together,
1790    as every construct has at least one clause that no other construct
1791    has (except for OMP_SECTIONS, which can only be combined with parallel,
1792    and OMP_MASTER, which doesn't have any clauses at all).
1793    OpenMP combined/composite constructs are:
1794    #pragma omp distribute parallel for
1795    #pragma omp distribute parallel for simd
1796    #pragma omp distribute simd
1797    #pragma omp for simd
1798    #pragma omp masked taskloop
1799    #pragma omp masked taskloop simd
1800    #pragma omp master taskloop
1801    #pragma omp master taskloop simd
1802    #pragma omp parallel for
1803    #pragma omp parallel for simd
1804    #pragma omp parallel loop
1805    #pragma omp parallel masked
1806    #pragma omp parallel masked taskloop
1807    #pragma omp parallel masked taskloop simd
1808    #pragma omp parallel master
1809    #pragma omp parallel master taskloop
1810    #pragma omp parallel master taskloop simd
1811    #pragma omp parallel sections
1812    #pragma omp target parallel
1813    #pragma omp target parallel for
1814    #pragma omp target parallel for simd
1815    #pragma omp target parallel loop
1816    #pragma omp target teams
1817    #pragma omp target teams distribute
1818    #pragma omp target teams distribute parallel for
1819    #pragma omp target teams distribute parallel for simd
1820    #pragma omp target teams distribute simd
1821    #pragma omp target teams loop
1822    #pragma omp target simd
1823    #pragma omp taskloop simd
1824    #pragma omp teams distribute
1825    #pragma omp teams distribute parallel for
1826    #pragma omp teams distribute parallel for simd
1827    #pragma omp teams distribute simd
1828    #pragma omp teams loop  */
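
/* As a rough illustration (a sketch, not covering every case): for

     #pragma omp parallel for simd num_threads(4) schedule(static) \
	     private(x) collapse(2)

   with CODE == OMP_SIMD, the loop below leaves num_threads on
   cclauses[C_OMP_CLAUSE_SPLIT_PARALLEL], schedule on
   cclauses[C_OMP_CLAUSE_SPLIT_FOR], private on the innermost simd
   split, duplicates collapse onto both the for and simd splits, and
   the implicit nowait mentioned below is added to the for split.  */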
1829 
1830 void
1831 c_omp_split_clauses (location_t loc, enum tree_code code,
1832 		     omp_clause_mask mask, tree clauses, tree *cclauses)
1833 {
1834   tree next, c;
1835   enum c_omp_clause_split s;
1836   int i;
1837   bool has_dup_allocate = false;
1838 
1839   for (i = 0; i < C_OMP_CLAUSE_SPLIT_COUNT; i++)
1840     cclauses[i] = NULL;
1841   /* Add implicit nowait clause on
1842      #pragma omp parallel {for,for simd,sections}.  */
1843   if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NUM_THREADS)) != 0)
1844     switch (code)
1845       {
1846       case OMP_FOR:
1847       case OMP_SIMD:
1848 	if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_SCHEDULE)) != 0)
1849 	  cclauses[C_OMP_CLAUSE_SPLIT_FOR]
1850 	    = build_omp_clause (loc, OMP_CLAUSE_NOWAIT);
1851 	break;
1852       case OMP_SECTIONS:
1853 	cclauses[C_OMP_CLAUSE_SPLIT_SECTIONS]
1854 	  = build_omp_clause (loc, OMP_CLAUSE_NOWAIT);
1855 	break;
1856       default:
1857 	break;
1858       }
1859 
1860   for (; clauses ; clauses = next)
1861     {
1862       next = OMP_CLAUSE_CHAIN (clauses);
1863 
1864       switch (OMP_CLAUSE_CODE (clauses))
1865 	{
1866 	/* First the clauses that are unique to some constructs.  */
1867 	case OMP_CLAUSE_DEVICE:
1868 	case OMP_CLAUSE_MAP:
1869 	case OMP_CLAUSE_IS_DEVICE_PTR:
1870 	case OMP_CLAUSE_HAS_DEVICE_ADDR:
1871 	case OMP_CLAUSE_DEFAULTMAP:
1872 	case OMP_CLAUSE_DEPEND:
1873 	  s = C_OMP_CLAUSE_SPLIT_TARGET;
1874 	  break;
1875 	case OMP_CLAUSE_NUM_TEAMS:
1876 	  s = C_OMP_CLAUSE_SPLIT_TEAMS;
1877 	  break;
1878 	case OMP_CLAUSE_DIST_SCHEDULE:
1879 	  s = C_OMP_CLAUSE_SPLIT_DISTRIBUTE;
1880 	  break;
1881 	case OMP_CLAUSE_COPYIN:
1882 	case OMP_CLAUSE_NUM_THREADS:
1883 	case OMP_CLAUSE_PROC_BIND:
1884 	  s = C_OMP_CLAUSE_SPLIT_PARALLEL;
1885 	  break;
1886 	case OMP_CLAUSE_ORDERED:
1887 	  s = C_OMP_CLAUSE_SPLIT_FOR;
1888 	  break;
1889 	case OMP_CLAUSE_SCHEDULE:
1890 	  s = C_OMP_CLAUSE_SPLIT_FOR;
1891 	  if (code != OMP_SIMD)
1892 	    OMP_CLAUSE_SCHEDULE_SIMD (clauses) = 0;
1893 	  break;
1894 	case OMP_CLAUSE_SAFELEN:
1895 	case OMP_CLAUSE_SIMDLEN:
1896 	case OMP_CLAUSE_ALIGNED:
1897 	case OMP_CLAUSE_NONTEMPORAL:
1898 	  s = C_OMP_CLAUSE_SPLIT_SIMD;
1899 	  break;
1900 	case OMP_CLAUSE_GRAINSIZE:
1901 	case OMP_CLAUSE_NUM_TASKS:
1902 	case OMP_CLAUSE_FINAL:
1903 	case OMP_CLAUSE_UNTIED:
1904 	case OMP_CLAUSE_MERGEABLE:
1905 	case OMP_CLAUSE_NOGROUP:
1906 	case OMP_CLAUSE_PRIORITY:
1907 	  s = C_OMP_CLAUSE_SPLIT_TASKLOOP;
1908 	  break;
1909 	case OMP_CLAUSE_BIND:
1910 	  s = C_OMP_CLAUSE_SPLIT_LOOP;
1911 	  break;
1912 	case OMP_CLAUSE_FILTER:
1913 	  s = C_OMP_CLAUSE_SPLIT_MASKED;
1914 	  break;
1915 	/* Duplicate this to all of taskloop, distribute, for, simd and
1916 	   loop.  */
1917 	case OMP_CLAUSE_COLLAPSE:
1918 	  if (code == OMP_SIMD)
1919 	    {
1920 	      if ((mask & ((OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_SCHEDULE)
1921 			   | (OMP_CLAUSE_MASK_1
1922 			      << PRAGMA_OMP_CLAUSE_DIST_SCHEDULE)
1923 			   | (OMP_CLAUSE_MASK_1
1924 			      << PRAGMA_OMP_CLAUSE_NOGROUP))) != 0)
1925 		{
1926 		  c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
1927 					OMP_CLAUSE_COLLAPSE);
1928 		  OMP_CLAUSE_COLLAPSE_EXPR (c)
1929 		    = OMP_CLAUSE_COLLAPSE_EXPR (clauses);
1930 		  OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_SIMD];
1931 		  cclauses[C_OMP_CLAUSE_SPLIT_SIMD] = c;
1932 		}
1933 	      else
1934 		{
1935 		  /* This must be #pragma omp target simd.  */
1936 		  s = C_OMP_CLAUSE_SPLIT_SIMD;
1937 		  break;
1938 		}
1939 	    }
1940 	  if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_SCHEDULE)) != 0)
1941 	    {
1942 	      if ((mask & (OMP_CLAUSE_MASK_1
1943 			   << PRAGMA_OMP_CLAUSE_DIST_SCHEDULE)) != 0)
1944 		{
1945 		  c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
1946 					OMP_CLAUSE_COLLAPSE);
1947 		  OMP_CLAUSE_COLLAPSE_EXPR (c)
1948 		    = OMP_CLAUSE_COLLAPSE_EXPR (clauses);
1949 		  OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_FOR];
1950 		  cclauses[C_OMP_CLAUSE_SPLIT_FOR] = c;
1951 		  s = C_OMP_CLAUSE_SPLIT_DISTRIBUTE;
1952 		}
1953 	      else
1954 		s = C_OMP_CLAUSE_SPLIT_FOR;
1955 	    }
1956 	  else if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NOGROUP))
1957 		   != 0)
1958 	    s = C_OMP_CLAUSE_SPLIT_TASKLOOP;
1959 	  else if (code == OMP_LOOP)
1960 	    s = C_OMP_CLAUSE_SPLIT_LOOP;
1961 	  else
1962 	    s = C_OMP_CLAUSE_SPLIT_DISTRIBUTE;
1963 	  break;
1964 	/* Private clause is supported on all constructs but master/masked;
1965 	   it is enough to put it on the innermost one other than
1966 	   master/masked.  For #pragma omp {for,sections} put it on parallel
1967 	   though, as that's what we did for OpenMP 3.1.  */
1968 	case OMP_CLAUSE_PRIVATE:
1969 	  switch (code)
1970 	    {
1971 	    case OMP_SIMD: s = C_OMP_CLAUSE_SPLIT_SIMD; break;
1972 	    case OMP_FOR: case OMP_SECTIONS:
1973 	    case OMP_PARALLEL: s = C_OMP_CLAUSE_SPLIT_PARALLEL; break;
1974 	    case OMP_DISTRIBUTE: s = C_OMP_CLAUSE_SPLIT_DISTRIBUTE; break;
1975 	    case OMP_TEAMS: s = C_OMP_CLAUSE_SPLIT_TEAMS; break;
1976 	    case OMP_MASTER: s = C_OMP_CLAUSE_SPLIT_PARALLEL; break;
1977 	    case OMP_MASKED: s = C_OMP_CLAUSE_SPLIT_PARALLEL; break;
1978 	    case OMP_TASKLOOP: s = C_OMP_CLAUSE_SPLIT_TASKLOOP; break;
1979 	    case OMP_LOOP: s = C_OMP_CLAUSE_SPLIT_LOOP; break;
1980 	    default: gcc_unreachable ();
1981 	    }
1982 	  break;
1983 	/* Firstprivate clause is supported on all constructs but
1984 	   simd, master, masked and loop.  Put it on the outermost of those
1985 	   and duplicate on teams and parallel.  */
1986 	case OMP_CLAUSE_FIRSTPRIVATE:
1987 	  if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_MAP))
1988 	      != 0)
1989 	    {
1990 	      if (code == OMP_SIMD
1991 		  && (mask & ((OMP_CLAUSE_MASK_1
1992 			       << PRAGMA_OMP_CLAUSE_NUM_THREADS)
1993 			      | (OMP_CLAUSE_MASK_1
1994 				 << PRAGMA_OMP_CLAUSE_NUM_TEAMS))) == 0)
1995 		{
1996 		  /* This must be #pragma omp target simd.  */
1997 		  s = C_OMP_CLAUSE_SPLIT_TARGET;
1998 		  OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clauses) = 1;
1999 		  OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT_TARGET (clauses) = 1;
2000 		  break;
2001 		}
2002 	      c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2003 				    OMP_CLAUSE_FIRSTPRIVATE);
2004 	      /* firstprivate should not be applied to target if it is
2005 		 also lastprivate or on the combined/composite construct,
2006 		 or if it is mentioned in map clause.  OMP_CLAUSE_DECLs
2007 		 may need to go through FE handling though (instantiation,
2008 		 C++ non-static data members, array section lowering), so
2009 		 add the clause with OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT and
2010 		 let *finish_omp_clauses and the gimplifier handle it
2011 		 right.  */
2012 	      OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c) = 1;
2013 	      OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clauses);
2014 	      OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_TARGET];
2015 	      cclauses[C_OMP_CLAUSE_SPLIT_TARGET] = c;
2016 	    }
2017 	  if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NUM_THREADS))
2018 	      != 0)
2019 	    {
2020 	      if ((mask & ((OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NUM_TEAMS)
2021 			   | (OMP_CLAUSE_MASK_1
2022 			      << PRAGMA_OMP_CLAUSE_DIST_SCHEDULE))) != 0)
2023 		{
2024 		  c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2025 					OMP_CLAUSE_FIRSTPRIVATE);
2026 		  OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clauses);
2027 		  OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_PARALLEL];
2028 		  cclauses[C_OMP_CLAUSE_SPLIT_PARALLEL] = c;
2029 		  if ((mask & (OMP_CLAUSE_MASK_1
2030 			       << PRAGMA_OMP_CLAUSE_NUM_TEAMS)) != 0)
2031 		    s = C_OMP_CLAUSE_SPLIT_TEAMS;
2032 		  else
2033 		    s = C_OMP_CLAUSE_SPLIT_DISTRIBUTE;
2034 		}
2035 	      else if ((mask & (OMP_CLAUSE_MASK_1
2036 				<< PRAGMA_OMP_CLAUSE_NOGROUP)) != 0)
2037 		/* This must be
2038 		   #pragma omp parallel mas{ked,ter} taskloop{, simd}.  */
2039 		s = C_OMP_CLAUSE_SPLIT_TASKLOOP;
2040 	      else
2041 		/* This must be
2042 		   #pragma omp parallel{, for{, simd}, sections,loop}
2043 		   #pragma omp parallel{, for{, simd}, sections, loop}
2044 		   #pragma omp target parallel.  */
2045 		s = C_OMP_CLAUSE_SPLIT_PARALLEL;
2046 	    }
2047 	  else if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NUM_TEAMS))
2048 		   != 0)
2049 	    {
2050 	      /* This must be one of
2051 		 #pragma omp {,target }teams {distribute,loop}
2052 		 #pragma omp target teams
2053 		 #pragma omp {,target }teams distribute simd.  */
2054 	      gcc_assert (code == OMP_DISTRIBUTE
2055 			  || code == OMP_LOOP
2056 			  || code == OMP_TEAMS
2057 			  || code == OMP_SIMD);
2058 	      s = C_OMP_CLAUSE_SPLIT_TEAMS;
2059 	    }
2060 	  else if ((mask & (OMP_CLAUSE_MASK_1
2061 			    << PRAGMA_OMP_CLAUSE_DIST_SCHEDULE)) != 0)
2062 	    {
2063 	      /* This must be #pragma omp distribute simd.  */
2064 	      gcc_assert (code == OMP_SIMD);
2065 	      s = C_OMP_CLAUSE_SPLIT_DISTRIBUTE;
2066 	    }
2067 	  else if ((mask & (OMP_CLAUSE_MASK_1
2068 			    << PRAGMA_OMP_CLAUSE_NOGROUP)) != 0)
2069 	    {
2070 	      /* This must be
2071 		 #pragma omp {,{,parallel }mas{ked,ter} }taskloop simd
2072 		 or
2073 		 #pragma omp {,parallel }mas{ked,ter} taskloop.  */
2074 	      gcc_assert (code == OMP_SIMD || code == OMP_TASKLOOP);
2075 	      s = C_OMP_CLAUSE_SPLIT_TASKLOOP;
2076 	    }
2077 	  else
2078 	    {
2079 	      /* This must be #pragma omp for simd.  */
2080 	      gcc_assert (code == OMP_SIMD);
2081 	      s = C_OMP_CLAUSE_SPLIT_FOR;
2082 	    }
2083 	  break;
2084 	/* Lastprivate is allowed on distribute, for, sections, taskloop, loop
2085 	   and simd.  In parallel {for{, simd},sections} we actually want to
2086 	   put it on parallel rather than for or sections.  */
2087 	case OMP_CLAUSE_LASTPRIVATE:
2088 	  if (code == OMP_DISTRIBUTE)
2089 	    {
2090 	      s = C_OMP_CLAUSE_SPLIT_DISTRIBUTE;
2091 	      break;
2092 	    }
2093 	  if ((mask & (OMP_CLAUSE_MASK_1
2094 		       << PRAGMA_OMP_CLAUSE_DIST_SCHEDULE)) != 0)
2095 	    {
2096 	      c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2097 				    OMP_CLAUSE_LASTPRIVATE);
2098 	      OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clauses);
2099 	      OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_DISTRIBUTE];
2100 	      OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
2101 		= OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (clauses);
2102 	      cclauses[C_OMP_CLAUSE_SPLIT_DISTRIBUTE] = c;
2103 	    }
2104 	  if (code == OMP_FOR || code == OMP_SECTIONS)
2105 	    {
2106 	      if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NUM_THREADS))
2107 		  != 0)
2108 		s = C_OMP_CLAUSE_SPLIT_PARALLEL;
2109 	      else
2110 		s = C_OMP_CLAUSE_SPLIT_FOR;
2111 	      break;
2112 	    }
2113 	  if (code == OMP_TASKLOOP)
2114 	    {
2115 	      s = C_OMP_CLAUSE_SPLIT_TASKLOOP;
2116 	      break;
2117 	    }
2118 	  if (code == OMP_LOOP)
2119 	    {
2120 	      s = C_OMP_CLAUSE_SPLIT_LOOP;
2121 	      break;
2122 	    }
2123 	  gcc_assert (code == OMP_SIMD);
2124 	  if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_SCHEDULE)) != 0)
2125 	    {
2126 	      c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2127 				    OMP_CLAUSE_LASTPRIVATE);
2128 	      OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clauses);
2129 	      OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
2130 		= OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (clauses);
2131 	      if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NUM_THREADS))
2132 		  != 0)
2133 		s = C_OMP_CLAUSE_SPLIT_PARALLEL;
2134 	      else
2135 		s = C_OMP_CLAUSE_SPLIT_FOR;
2136 	      OMP_CLAUSE_CHAIN (c) = cclauses[s];
2137 	      cclauses[s] = c;
2138 	    }
2139 	  if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NOGROUP)) != 0)
2140 	    {
2141 	      c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2142 				    OMP_CLAUSE_LASTPRIVATE);
2143 	      OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clauses);
2144 	      OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
2145 		= OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (clauses);
2146 	      OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_TASKLOOP];
2147 	      cclauses[C_OMP_CLAUSE_SPLIT_TASKLOOP] = c;
2148 	    }
2149 	  s = C_OMP_CLAUSE_SPLIT_SIMD;
2150 	  break;
2151 	/* Shared and default clauses are allowed on parallel, teams and
2152 	   taskloop.  */
2153 	case OMP_CLAUSE_SHARED:
2154 	case OMP_CLAUSE_DEFAULT:
2155 	  if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NOGROUP))
2156 	      != 0)
2157 	    {
2158 	      if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NUM_THREADS))
2159 		  != 0)
2160 		{
2161 		  c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2162 					OMP_CLAUSE_CODE (clauses));
2163 		  if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_SHARED)
2164 		    OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clauses);
2165 		  else
2166 		    OMP_CLAUSE_DEFAULT_KIND (c)
2167 		      = OMP_CLAUSE_DEFAULT_KIND (clauses);
2168 		  OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_PARALLEL];
2169 		  cclauses[C_OMP_CLAUSE_SPLIT_PARALLEL] = c;
2170 		}
2171 	      s = C_OMP_CLAUSE_SPLIT_TASKLOOP;
2172 	      break;
2173 	    }
2174 	  if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NUM_TEAMS))
2175 	      != 0)
2176 	    {
2177 	      if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NUM_THREADS))
2178 		  == 0)
2179 		{
2180 		  s = C_OMP_CLAUSE_SPLIT_TEAMS;
2181 		  break;
2182 		}
2183 	      c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2184 				    OMP_CLAUSE_CODE (clauses));
2185 	      if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_SHARED)
2186 		OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clauses);
2187 	      else
2188 		OMP_CLAUSE_DEFAULT_KIND (c)
2189 		  = OMP_CLAUSE_DEFAULT_KIND (clauses);
2190 	      OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_TEAMS];
2191 	      cclauses[C_OMP_CLAUSE_SPLIT_TEAMS] = c;
2192 	    }
2193 	  s = C_OMP_CLAUSE_SPLIT_PARALLEL;
2194 	  break;
2195 	/* order clauses are allowed on distribute, for, simd and loop.  */
2196 	case OMP_CLAUSE_ORDER:
2197 	  if ((mask & (OMP_CLAUSE_MASK_1
2198 		       << PRAGMA_OMP_CLAUSE_DIST_SCHEDULE)) != 0)
2199 	    {
2200 	      if (code == OMP_DISTRIBUTE)
2201 		{
2202 		  s = C_OMP_CLAUSE_SPLIT_DISTRIBUTE;
2203 		  break;
2204 		}
2205 	      c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2206 				    OMP_CLAUSE_ORDER);
2207 	      OMP_CLAUSE_ORDER_UNCONSTRAINED (c)
2208 		= OMP_CLAUSE_ORDER_UNCONSTRAINED (clauses);
2209 	      OMP_CLAUSE_ORDER_REPRODUCIBLE (c)
2210 		= OMP_CLAUSE_ORDER_REPRODUCIBLE (clauses);
2211 	      OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_DISTRIBUTE];
2212 	      cclauses[C_OMP_CLAUSE_SPLIT_DISTRIBUTE] = c;
2213 	    }
2214 	  if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_SCHEDULE)) != 0)
2215 	    {
2216 	      if (code == OMP_SIMD)
2217 		{
2218 		  c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2219 					OMP_CLAUSE_ORDER);
2220 		  OMP_CLAUSE_ORDER_UNCONSTRAINED (c)
2221 		    = OMP_CLAUSE_ORDER_UNCONSTRAINED (clauses);
2222 		  OMP_CLAUSE_ORDER_REPRODUCIBLE (c)
2223 		    = OMP_CLAUSE_ORDER_REPRODUCIBLE (clauses);
2224 		  OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_FOR];
2225 		  cclauses[C_OMP_CLAUSE_SPLIT_FOR] = c;
2226 		  s = C_OMP_CLAUSE_SPLIT_SIMD;
2227 		}
2228 	      else
2229 		s = C_OMP_CLAUSE_SPLIT_FOR;
2230 	    }
2231 	  else if (code == OMP_LOOP)
2232 	    s = C_OMP_CLAUSE_SPLIT_LOOP;
2233 	  else
2234 	    s = C_OMP_CLAUSE_SPLIT_SIMD;
2235 	  break;
2236 	/* Reduction is allowed on simd, for, parallel, sections, taskloop,
2237 	   teams and loop.  Duplicate it on all of them, but omit on for or
2238 	   sections if parallel is present (unless inscan, in which case
2239 	   omit it on parallel).  If taskloop or loop is combined with
2240 	   parallel, omit it on parallel.  */
2241 	case OMP_CLAUSE_REDUCTION:
2242 	  if (OMP_CLAUSE_REDUCTION_TASK (clauses))
2243 	    {
2244 	      if (code == OMP_SIMD || code == OMP_LOOP)
2245 		{
2246 		  error_at (OMP_CLAUSE_LOCATION (clauses),
2247 			    "invalid %<task%> reduction modifier on construct "
2248 			    "combined with %<simd%> or %<loop%>");
2249 		  OMP_CLAUSE_REDUCTION_TASK (clauses) = 0;
2250 		}
2251 	      else if (code != OMP_SECTIONS
2252 		       && (mask & (OMP_CLAUSE_MASK_1
2253 				   << PRAGMA_OMP_CLAUSE_NUM_THREADS)) == 0
2254 		       && (mask & (OMP_CLAUSE_MASK_1
2255 				   << PRAGMA_OMP_CLAUSE_SCHEDULE)) == 0)
2256 		{
2257 		  error_at (OMP_CLAUSE_LOCATION (clauses),
2258 			    "invalid %<task%> reduction modifier on construct "
2259 			    "not combined with %<parallel%>, %<for%> or "
2260 			    "%<sections%>");
2261 		  OMP_CLAUSE_REDUCTION_TASK (clauses) = 0;
2262 		}
2263 	    }
2264 	  if (OMP_CLAUSE_REDUCTION_INSCAN (clauses)
2265 	      && ((mask & ((OMP_CLAUSE_MASK_1
2266 			    << PRAGMA_OMP_CLAUSE_DIST_SCHEDULE)
2267 			   | (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_MAP)))
2268 		  != 0))
2269 	    {
2270 	      error_at (OMP_CLAUSE_LOCATION (clauses),
2271 			"%<inscan%> %<reduction%> clause on construct other "
2272 			"than %<for%>, %<simd%>, %<for simd%>, "
2273 			"%<parallel for%>, %<parallel for simd%>");
2274 	      OMP_CLAUSE_REDUCTION_INSCAN (clauses) = 0;
2275 	    }
2276 	  if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_MAP)) != 0)
2277 	    {
2278 	      c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2279 				    OMP_CLAUSE_MAP);
2280 	      OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clauses);
2281 	      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
2282 	      OMP_CLAUSE_MAP_IMPLICIT (c) = 1;
2283 	      OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_TARGET];
2284 	      cclauses[C_OMP_CLAUSE_SPLIT_TARGET] = c;
2285 	    }
2286 	  if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_SCHEDULE)) != 0)
2287 	    {
2288 	      if (code == OMP_SIMD)
2289 		{
2290 		  c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2291 					OMP_CLAUSE_REDUCTION);
2292 		  OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clauses);
2293 		  OMP_CLAUSE_REDUCTION_CODE (c)
2294 		    = OMP_CLAUSE_REDUCTION_CODE (clauses);
2295 		  OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
2296 		    = OMP_CLAUSE_REDUCTION_PLACEHOLDER (clauses);
2297 		  OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
2298 		    = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clauses);
2299 		  OMP_CLAUSE_REDUCTION_INSCAN (c)
2300 		    = OMP_CLAUSE_REDUCTION_INSCAN (clauses);
2301 		  OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_SIMD];
2302 		  cclauses[C_OMP_CLAUSE_SPLIT_SIMD] = c;
2303 		}
2304 	      if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NUM_TEAMS))
2305 		  != 0)
2306 		{
2307 		  c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2308 					OMP_CLAUSE_REDUCTION);
2309 		  OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clauses);
2310 		  OMP_CLAUSE_REDUCTION_CODE (c)
2311 		    = OMP_CLAUSE_REDUCTION_CODE (clauses);
2312 		  OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
2313 		    = OMP_CLAUSE_REDUCTION_PLACEHOLDER (clauses);
2314 		  OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
2315 		    = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clauses);
2316 		  OMP_CLAUSE_REDUCTION_INSCAN (c)
2317 		    = OMP_CLAUSE_REDUCTION_INSCAN (clauses);
2318 		  OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_TEAMS];
2319 		  cclauses[C_OMP_CLAUSE_SPLIT_TEAMS] = c;
2320 		  s = C_OMP_CLAUSE_SPLIT_PARALLEL;
2321 		}
2322 	      else if ((mask & (OMP_CLAUSE_MASK_1
2323 				<< PRAGMA_OMP_CLAUSE_NUM_THREADS)) != 0
2324 		       && !OMP_CLAUSE_REDUCTION_INSCAN (clauses))
2325 		s = C_OMP_CLAUSE_SPLIT_PARALLEL;
2326 	      else
2327 		s = C_OMP_CLAUSE_SPLIT_FOR;
2328 	    }
2329 	  else if (code == OMP_SECTIONS
2330 		   || code == OMP_PARALLEL
2331 		   || code == OMP_MASTER
2332 		   || code == OMP_MASKED)
2333 	    s = C_OMP_CLAUSE_SPLIT_PARALLEL;
2334 	  else if (code == OMP_TASKLOOP)
2335 	    s = C_OMP_CLAUSE_SPLIT_TASKLOOP;
2336 	  else if (code == OMP_LOOP)
2337 	    s = C_OMP_CLAUSE_SPLIT_LOOP;
2338 	  else if (code == OMP_SIMD)
2339 	    {
2340 	      if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NOGROUP))
2341 		  != 0)
2342 		{
2343 		  c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2344 					OMP_CLAUSE_REDUCTION);
2345 		  OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clauses);
2346 		  OMP_CLAUSE_REDUCTION_CODE (c)
2347 		    = OMP_CLAUSE_REDUCTION_CODE (clauses);
2348 		  OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
2349 		    = OMP_CLAUSE_REDUCTION_PLACEHOLDER (clauses);
2350 		  OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
2351 		    = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clauses);
2352 		  OMP_CLAUSE_REDUCTION_INSCAN (c)
2353 		    = OMP_CLAUSE_REDUCTION_INSCAN (clauses);
2354 		  OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_TASKLOOP];
2355 		  cclauses[C_OMP_CLAUSE_SPLIT_TASKLOOP] = c;
2356 		}
2357 	      else if ((mask & (OMP_CLAUSE_MASK_1
2358 				<< PRAGMA_OMP_CLAUSE_NUM_TEAMS)) != 0)
2359 		{
2360 		  c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2361 					OMP_CLAUSE_REDUCTION);
2362 		  OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clauses);
2363 		  OMP_CLAUSE_REDUCTION_CODE (c)
2364 		    = OMP_CLAUSE_REDUCTION_CODE (clauses);
2365 		  OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
2366 		    = OMP_CLAUSE_REDUCTION_PLACEHOLDER (clauses);
2367 		  OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
2368 		    = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clauses);
2369 		  OMP_CLAUSE_REDUCTION_INSCAN (c)
2370 		    = OMP_CLAUSE_REDUCTION_INSCAN (clauses);
2371 		  OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_TEAMS];
2372 		  cclauses[C_OMP_CLAUSE_SPLIT_TEAMS] = c;
2373 		}
2374 	      s = C_OMP_CLAUSE_SPLIT_SIMD;
2375 	    }
2376 	  else
2377 	    s = C_OMP_CLAUSE_SPLIT_TEAMS;
2378 	  break;
2379 	case OMP_CLAUSE_IN_REDUCTION:
2380 	  if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_MAP)) != 0)
2381 	    {
2382 	      /* When on target, map(always, tofrom: item) is added as
2383 		 well.  For non-combined target it is added in the FEs.  */
2384 	      c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2385 				    OMP_CLAUSE_MAP);
2386 	      OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clauses);
2387 	      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ALWAYS_TOFROM);
2388 	      OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_TARGET];
2389 	      cclauses[C_OMP_CLAUSE_SPLIT_TARGET] = c;
2390 	      s = C_OMP_CLAUSE_SPLIT_TARGET;
2391 	      break;
2392 	    }
2393 	  /* in_reduction on taskloop simd becomes reduction on the simd
2394 	     and keeps being in_reduction on taskloop.  */
2395 	  if (code == OMP_SIMD)
2396 	    {
2397 	      c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2398 				    OMP_CLAUSE_REDUCTION);
2399 	      OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clauses);
2400 	      OMP_CLAUSE_REDUCTION_CODE (c)
2401 		= OMP_CLAUSE_REDUCTION_CODE (clauses);
2402 	      OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
2403 		= OMP_CLAUSE_REDUCTION_PLACEHOLDER (clauses);
2404 	      OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
2405 		= OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (clauses);
2406 	      OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_SIMD];
2407 	      cclauses[C_OMP_CLAUSE_SPLIT_SIMD] = c;
2408 	    }
2409 	  s = C_OMP_CLAUSE_SPLIT_TASKLOOP;
2410 	  break;
2411 	case OMP_CLAUSE_IF:
2412 	  if (OMP_CLAUSE_IF_MODIFIER (clauses) != ERROR_MARK)
2413 	    {
2414 	      s = C_OMP_CLAUSE_SPLIT_COUNT;
2415 	      switch (OMP_CLAUSE_IF_MODIFIER (clauses))
2416 		{
2417 		case OMP_PARALLEL:
2418 		  if ((mask & (OMP_CLAUSE_MASK_1
2419 			       << PRAGMA_OMP_CLAUSE_NUM_THREADS)) != 0)
2420 		    s = C_OMP_CLAUSE_SPLIT_PARALLEL;
2421 		  break;
2422 		case OMP_SIMD:
2423 		  if (code == OMP_SIMD)
2424 		    s = C_OMP_CLAUSE_SPLIT_SIMD;
2425 		  break;
2426 		case OMP_TASKLOOP:
2427 		  if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NOGROUP))
2428 		      != 0)
2429 		    s = C_OMP_CLAUSE_SPLIT_TASKLOOP;
2430 		  break;
2431 		case OMP_TARGET:
2432 		  if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_MAP))
2433 		      != 0)
2434 		    s = C_OMP_CLAUSE_SPLIT_TARGET;
2435 		  break;
2436 		default:
2437 		  break;
2438 		}
2439 	      if (s != C_OMP_CLAUSE_SPLIT_COUNT)
2440 		break;
2441 	      /* Error recovery: an invalid if-modifier was specified, so add
2442 		 the clause to just one construct.  */
2443 	      if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_MAP)) != 0)
2444 		s = C_OMP_CLAUSE_SPLIT_TARGET;
2445 	      else if ((mask & (OMP_CLAUSE_MASK_1
2446 				<< PRAGMA_OMP_CLAUSE_NUM_THREADS)) != 0)
2447 		s = C_OMP_CLAUSE_SPLIT_PARALLEL;
2448 	      else if ((mask & (OMP_CLAUSE_MASK_1
2449 				<< PRAGMA_OMP_CLAUSE_NOGROUP)) != 0)
2450 		s = C_OMP_CLAUSE_SPLIT_TASKLOOP;
2451 	      else if (code == OMP_SIMD)
2452 		s = C_OMP_CLAUSE_SPLIT_SIMD;
2453 	      else
2454 		gcc_unreachable ();
2455 	      break;
2456 	    }
2457 	  /* Otherwise, duplicate if clause to all constructs.  */
2458 	  if (code == OMP_SIMD)
2459 	    {
2460 	      if ((mask & ((OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_MAP)
2461 			   | (OMP_CLAUSE_MASK_1
2462 			      << PRAGMA_OMP_CLAUSE_NUM_THREADS)
2463 			   | (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NOGROUP)))
2464 		  != 0)
2465 		{
2466 		  c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2467 					OMP_CLAUSE_IF);
2468 		  OMP_CLAUSE_IF_MODIFIER (c)
2469 		    = OMP_CLAUSE_IF_MODIFIER (clauses);
2470 		  OMP_CLAUSE_IF_EXPR (c) = OMP_CLAUSE_IF_EXPR (clauses);
2471 		  OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_SIMD];
2472 		  cclauses[C_OMP_CLAUSE_SPLIT_SIMD] = c;
2473 		}
2474 	      else
2475 		{
2476 		  s = C_OMP_CLAUSE_SPLIT_SIMD;
2477 		  break;
2478 		}
2479 	    }
2480 	  if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NOGROUP))
2481 	      != 0)
2482 	    {
2483 	      if ((mask & (OMP_CLAUSE_MASK_1
2484 			   << PRAGMA_OMP_CLAUSE_NUM_THREADS)) != 0)
2485 		{
2486 		  c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2487 					OMP_CLAUSE_IF);
2488 		  OMP_CLAUSE_IF_MODIFIER (c)
2489 		    = OMP_CLAUSE_IF_MODIFIER (clauses);
2490 		  OMP_CLAUSE_IF_EXPR (c) = OMP_CLAUSE_IF_EXPR (clauses);
2491 		  OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_TASKLOOP];
2492 		  cclauses[C_OMP_CLAUSE_SPLIT_TASKLOOP] = c;
2493 		  s = C_OMP_CLAUSE_SPLIT_PARALLEL;
2494 		}
2495 	      else
2496 		s = C_OMP_CLAUSE_SPLIT_TASKLOOP;
2497 	    }
2498 	  else if ((mask & (OMP_CLAUSE_MASK_1
2499 			    << PRAGMA_OMP_CLAUSE_NUM_THREADS)) != 0)
2500 	    {
2501 	      if ((mask & (OMP_CLAUSE_MASK_1
2502 			   << PRAGMA_OMP_CLAUSE_MAP)) != 0)
2503 		{
2504 		  c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2505 					OMP_CLAUSE_IF);
2506 		  OMP_CLAUSE_IF_MODIFIER (c)
2507 		    = OMP_CLAUSE_IF_MODIFIER (clauses);
2508 		  OMP_CLAUSE_IF_EXPR (c) = OMP_CLAUSE_IF_EXPR (clauses);
2509 		  OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_TARGET];
2510 		  cclauses[C_OMP_CLAUSE_SPLIT_TARGET] = c;
2511 		  s = C_OMP_CLAUSE_SPLIT_PARALLEL;
2512 		}
2513 	      else
2514 		s = C_OMP_CLAUSE_SPLIT_PARALLEL;
2515 	    }
2516 	  else
2517 	    s = C_OMP_CLAUSE_SPLIT_TARGET;
2518 	  break;
2519 	case OMP_CLAUSE_LINEAR:
2520 	  /* Linear clause is allowed on simd and for.  Put it on the
2521 	     innermost construct.  */
2522 	  if (code == OMP_SIMD)
2523 	    s = C_OMP_CLAUSE_SPLIT_SIMD;
2524 	  else
2525 	    s = C_OMP_CLAUSE_SPLIT_FOR;
2526 	  break;
2527 	case OMP_CLAUSE_NOWAIT:
2528 	  /* Nowait clause is allowed on target, for and sections, but
2529 	     is not allowed on parallel for or parallel sections.  Therefore,
2530 	     put it on the target construct if present, because target can
2531 	     only be combined with parallel for{, simd} and not with
2532 	     for{, simd}; otherwise put it on the worksharing construct.  */
2533 	  if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_MAP))
2534 	      != 0)
2535 	    s = C_OMP_CLAUSE_SPLIT_TARGET;
2536 	  else
2537 	    s = C_OMP_CLAUSE_SPLIT_FOR;
2538 	  break;
2539 	  /* thread_limit is allowed on target and teams.  Distribute it
2540 	     to all.  */
2541 	case OMP_CLAUSE_THREAD_LIMIT:
2542 	  if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_MAP))
2543 	      != 0)
2544 	    {
2545 	      if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NUM_TEAMS))
2546 		  != 0)
2547 		{
2548 		  c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2549 					OMP_CLAUSE_THREAD_LIMIT);
2550 		  OMP_CLAUSE_THREAD_LIMIT_EXPR (c)
2551 		    = OMP_CLAUSE_THREAD_LIMIT_EXPR (clauses);
2552 		  OMP_CLAUSE_CHAIN (c) = cclauses[C_OMP_CLAUSE_SPLIT_TARGET];
2553 		  cclauses[C_OMP_CLAUSE_SPLIT_TARGET] = c;
2554 		}
2555 	      else
2556 		{
2557 		  s = C_OMP_CLAUSE_SPLIT_TARGET;
2558 		  break;
2559 		}
2560 	    }
2561 	  s = C_OMP_CLAUSE_SPLIT_TEAMS;
2562 	  break;
2563 	/* Allocate clause is allowed on target, teams, distribute, parallel,
2564 	   for, sections and taskloop.  Distribute it to all.  */
2565 	case OMP_CLAUSE_ALLOCATE:
2566 	  s = C_OMP_CLAUSE_SPLIT_COUNT;
2567 	  for (i = 0; i < C_OMP_CLAUSE_SPLIT_COUNT; i++)
2568 	    {
2569 	      switch (i)
2570 		{
2571 		case C_OMP_CLAUSE_SPLIT_TARGET:
2572 		  if ((mask & (OMP_CLAUSE_MASK_1
2573 			       << PRAGMA_OMP_CLAUSE_MAP)) == 0)
2574 		    continue;
2575 		  break;
2576 		case C_OMP_CLAUSE_SPLIT_TEAMS:
2577 		  if ((mask & (OMP_CLAUSE_MASK_1
2578 			       << PRAGMA_OMP_CLAUSE_NUM_TEAMS)) == 0)
2579 		    continue;
2580 		  break;
2581 		case C_OMP_CLAUSE_SPLIT_DISTRIBUTE:
2582 		  if ((mask & (OMP_CLAUSE_MASK_1
2583 			       << PRAGMA_OMP_CLAUSE_DIST_SCHEDULE)) == 0)
2584 		    continue;
2585 		  break;
2586 		case C_OMP_CLAUSE_SPLIT_PARALLEL:
2587 		  if ((mask & (OMP_CLAUSE_MASK_1
2588 			       << PRAGMA_OMP_CLAUSE_NUM_THREADS)) == 0)
2589 		    continue;
2590 		  break;
2591 		case C_OMP_CLAUSE_SPLIT_FOR:
2592 		  STATIC_ASSERT (C_OMP_CLAUSE_SPLIT_SECTIONS
2593 				 == C_OMP_CLAUSE_SPLIT_FOR
2594 				 && (C_OMP_CLAUSE_SPLIT_TASKLOOP
2595 				     == C_OMP_CLAUSE_SPLIT_FOR)
2596 				 && (C_OMP_CLAUSE_SPLIT_LOOP
2597 				     == C_OMP_CLAUSE_SPLIT_FOR));
2598 		  if (code == OMP_SECTIONS)
2599 		    break;
2600 		  if ((mask & (OMP_CLAUSE_MASK_1
2601 			       << PRAGMA_OMP_CLAUSE_SCHEDULE)) != 0)
2602 		    break;
2603 		  if ((mask & (OMP_CLAUSE_MASK_1
2604 			       << PRAGMA_OMP_CLAUSE_NOGROUP)) != 0)
2605 		    break;
2606 		  continue;
2607 		case C_OMP_CLAUSE_SPLIT_SIMD:
2608 		  continue;
2609 		default:
2610 		  gcc_unreachable ();
2611 		}
2612 	      if (s != C_OMP_CLAUSE_SPLIT_COUNT)
2613 		{
2614 		  c = build_omp_clause (OMP_CLAUSE_LOCATION (clauses),
2615 					OMP_CLAUSE_ALLOCATE);
2616 		  OMP_CLAUSE_DECL (c)
2617 		    = OMP_CLAUSE_DECL (clauses);
2618 		  OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
2619 		    = OMP_CLAUSE_ALLOCATE_ALLOCATOR (clauses);
2620 		  OMP_CLAUSE_ALLOCATE_ALIGN (c)
2621 		    = OMP_CLAUSE_ALLOCATE_ALIGN (clauses);
2622 		  OMP_CLAUSE_CHAIN (c) = cclauses[s];
2623 		  cclauses[s] = c;
2624 		  has_dup_allocate = true;
2625 		}
2626 	      s = (enum c_omp_clause_split) i;
2627 	    }
2628 	  gcc_assert (s != C_OMP_CLAUSE_SPLIT_COUNT);
2629 	  break;
2630 	default:
2631 	  gcc_unreachable ();
2632 	}
2633       OMP_CLAUSE_CHAIN (clauses) = cclauses[s];
2634       cclauses[s] = clauses;
2635     }
2636 
2637   if (has_dup_allocate)
2638     {
2639       bool need_prune = false;
2640       bitmap_obstack_initialize (NULL);
2641       for (i = 0; i < C_OMP_CLAUSE_SPLIT_SIMD - (code == OMP_LOOP); i++)
2642 	if (cclauses[i])
2643 	  {
2644 	    bitmap_head allocate_head;
2645 	    bitmap_initialize (&allocate_head, &bitmap_default_obstack);
2646 	    for (c = cclauses[i]; c; c = OMP_CLAUSE_CHAIN (c))
2647 	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE
2648 		  && DECL_P (OMP_CLAUSE_DECL (c)))
2649 		bitmap_set_bit (&allocate_head,
2650 				DECL_UID (OMP_CLAUSE_DECL (c)));
2651 	    for (c = cclauses[i]; c; c = OMP_CLAUSE_CHAIN (c))
2652 	      switch (OMP_CLAUSE_CODE (c))
2653 		{
2654 		case OMP_CLAUSE_REDUCTION:
2655 		case OMP_CLAUSE_IN_REDUCTION:
2656 		case OMP_CLAUSE_TASK_REDUCTION:
2657 		  if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
2658 		    {
2659 		      tree t = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
2660 		      if (TREE_CODE (t) == POINTER_PLUS_EXPR)
2661 			t = TREE_OPERAND (t, 0);
2662 		      if (TREE_CODE (t) == ADDR_EXPR
2663 			  || TREE_CODE (t) == INDIRECT_REF)
2664 			t = TREE_OPERAND (t, 0);
2665 		      if (DECL_P (t))
2666 			bitmap_clear_bit (&allocate_head, DECL_UID (t));
2667 		      break;
2668 		    }
2669 		  else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == TREE_LIST)
2670 		    {
2671 		      tree t;
2672 		      for (t = OMP_CLAUSE_DECL (c);
2673 			   TREE_CODE (t) == TREE_LIST; t = TREE_CHAIN (t))
2674 			;
2675 		      if (DECL_P (t))
2676 			bitmap_clear_bit (&allocate_head, DECL_UID (t));
2677 		      break;
2678 		    }
2679 		  /* FALLTHRU */
2680 		case OMP_CLAUSE_PRIVATE:
2681 		case OMP_CLAUSE_FIRSTPRIVATE:
2682 		case OMP_CLAUSE_LASTPRIVATE:
2683 		case OMP_CLAUSE_LINEAR:
2684 		  if (DECL_P (OMP_CLAUSE_DECL (c)))
2685 		    bitmap_clear_bit (&allocate_head,
2686 				      DECL_UID (OMP_CLAUSE_DECL (c)));
2687 		  break;
2688 		default:
2689 		  break;
2690 		}
2691 	    for (c = cclauses[i]; c; c = OMP_CLAUSE_CHAIN (c))
2692 	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE
2693 		  && DECL_P (OMP_CLAUSE_DECL (c))
2694 		  && bitmap_bit_p (&allocate_head,
2695 				   DECL_UID (OMP_CLAUSE_DECL (c))))
2696 		{
2697 		  /* Mark allocate clauses which don't have a corresponding
2698 		     explicit data-sharing clause.  */
2699 		  OMP_CLAUSE_ALLOCATE_COMBINED (c) = 1;
2700 		  need_prune = true;
2701 		}
2702 	  }
2703       bitmap_obstack_release (NULL);
2704       if (need_prune)
2705 	{
2706 	  /* At least one allocate clause has been marked.  Walk all the
2707 	     duplicated allocate clauses in sync.  If a clause is marked in
2708 	     all constituent constructs, diagnose it as invalid and remove
2709 	     all its copies.  Otherwise, remove the marked copies inner to
2710 	     a construct that doesn't have the clause marked.  Keep the outer
2711 	     marked ones, because some clause duplication is done only
2712 	     during gimplification.  */
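	  /* For illustration (a sketch): with
	     #pragma omp parallel for allocate(x) firstprivate(x)
	     the allocate clause was duplicated onto both the parallel and
	     for splits; only the parallel split has the matching
	     firstprivate, so the inner (for) copy is removed here, and an
	     error is reported only if no constituent construct has a
	     matching data-sharing clause at all.  */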
2713 	  tree *p[C_OMP_CLAUSE_SPLIT_COUNT];
2714 	  for (i = 0; i < C_OMP_CLAUSE_SPLIT_COUNT; i++)
2715 	    if (cclauses[i] == NULL_TREE
2716 		|| i == C_OMP_CLAUSE_SPLIT_SIMD
2717 		|| (i == C_OMP_CLAUSE_SPLIT_LOOP && code == OMP_LOOP))
2718 	      p[i] = NULL;
2719 	    else
2720 	      p[i] = &cclauses[i];
2721 	  do
2722 	    {
2723 	      int j = -1;
2724 	      tree seen = NULL_TREE;
2725 	      for (i = C_OMP_CLAUSE_SPLIT_COUNT - 1; i >= 0; i--)
2726 		if (p[i])
2727 		  {
2728 		    while (*p[i]
2729 			   && OMP_CLAUSE_CODE (*p[i]) != OMP_CLAUSE_ALLOCATE)
2730 		      p[i] = &OMP_CLAUSE_CHAIN (*p[i]);
2731 		    if (*p[i] == NULL_TREE)
2732 		      {
2733 			i = C_OMP_CLAUSE_SPLIT_COUNT;
2734 			break;
2735 		      }
2736 		    if (!OMP_CLAUSE_ALLOCATE_COMBINED (*p[i]) && j == -1)
2737 		      j = i;
2738 		    seen = *p[i];
2739 		  }
2740 	      if (i == C_OMP_CLAUSE_SPLIT_COUNT)
2741 		break;
2742 	      if (j == -1)
2743 		error_at (OMP_CLAUSE_LOCATION (seen),
2744 			  "%qD specified in %<allocate%> clause but not in "
2745 			  "an explicit privatization clause",
2746 			  OMP_CLAUSE_DECL (seen));
2747 	      for (i = 0; i < C_OMP_CLAUSE_SPLIT_COUNT; i++)
2748 		if (p[i])
2749 		  {
2750 		    if (i > j)
2751 		      /* Remove.  */
2752 		      *p[i] = OMP_CLAUSE_CHAIN (*p[i]);
2753 		    else
2754 		      /* Keep.  */
2755 		      p[i] = &OMP_CLAUSE_CHAIN (*p[i]);
2756 		  }
2757 	    }
2758 	  while (1);
2759 	}
2760     }
2761 
2762   if (!flag_checking)
2763     return;
2764 
2765   if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_MAP)) == 0)
2766     gcc_assert (cclauses[C_OMP_CLAUSE_SPLIT_TARGET] == NULL_TREE);
2767   if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NUM_TEAMS)) == 0)
2768     gcc_assert (cclauses[C_OMP_CLAUSE_SPLIT_TEAMS] == NULL_TREE);
2769   if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_DIST_SCHEDULE)) == 0
2770       && (mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_FILTER)) == 0)
2771     gcc_assert (cclauses[C_OMP_CLAUSE_SPLIT_DISTRIBUTE] == NULL_TREE);
2772   if ((mask & (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NUM_THREADS)) == 0)
2773     gcc_assert (cclauses[C_OMP_CLAUSE_SPLIT_PARALLEL] == NULL_TREE);
2774   if ((mask & ((OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_SCHEDULE)
2775 	       | (OMP_CLAUSE_MASK_1 << PRAGMA_OMP_CLAUSE_NOGROUP))) == 0
2776       && code != OMP_SECTIONS
2777       && code != OMP_LOOP)
2778     gcc_assert (cclauses[C_OMP_CLAUSE_SPLIT_FOR] == NULL_TREE);
2779   if (code != OMP_SIMD)
2780     gcc_assert (cclauses[C_OMP_CLAUSE_SPLIT_SIMD] == NULL_TREE);
2781 }
2782 
2783 
2784 /* qsort callback to compare #pragma omp declare simd clauses.  */
2785 
2786 static int
2787 c_omp_declare_simd_clause_cmp (const void *p, const void *q)
2788 {
2789   tree a = *(const tree *) p;
2790   tree b = *(const tree *) q;
2791   if (OMP_CLAUSE_CODE (a) != OMP_CLAUSE_CODE (b))
2792     {
2793       if (OMP_CLAUSE_CODE (a) > OMP_CLAUSE_CODE (b))
2794 	return -1;
2795       return 1;
2796     }
2797   if (OMP_CLAUSE_CODE (a) != OMP_CLAUSE_SIMDLEN
2798       && OMP_CLAUSE_CODE (a) != OMP_CLAUSE_INBRANCH
2799       && OMP_CLAUSE_CODE (a) != OMP_CLAUSE_NOTINBRANCH)
2800     {
2801       int c = tree_to_shwi (OMP_CLAUSE_DECL (a));
2802       int d = tree_to_shwi (OMP_CLAUSE_DECL (b));
2803       if (c < d)
2804 	return 1;
2805       if (c > d)
2806 	return -1;
2807     }
2808   return 0;
2809 }
2810 
2811 /* Change PARM_DECLs in OMP_CLAUSE_DECL of #pragma omp declare simd
2812    CLAUSES on FNDECL into argument indexes and sort them.  */
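
/* As an illustration (a sketch): given

     #pragma omp declare simd uniform(n) linear(p:1) simdlen(8)
     void f (int *p, int n);

   the PARM_DECL in uniform(n) is replaced by the index 1 and the one
   in linear(p:1) by the index 0, simdlen is left alone, and the
   clauses are then sorted with c_omp_declare_simd_clause_cmp so the
   clause list recorded for the directive has a canonical order.  */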
2813 
2814 tree
2815 c_omp_declare_simd_clauses_to_numbers (tree parms, tree clauses)
2816 {
2817   tree c;
2818   vec<tree> clvec = vNULL;
2819 
2820   for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2821     {
2822       if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_SIMDLEN
2823 	  && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_INBRANCH
2824 	  && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_NOTINBRANCH)
2825 	{
2826 	  tree decl = OMP_CLAUSE_DECL (c);
2827 	  tree arg;
2828 	  int idx;
2829 	  for (arg = parms, idx = 0; arg;
2830 	       arg = TREE_CHAIN (arg), idx++)
2831 	    if (arg == decl)
2832 	      break;
2833 	  if (arg == NULL_TREE)
2834 	    {
2835 	      error_at (OMP_CLAUSE_LOCATION (c),
2836 			"%qD is not a function argument", decl);
2837 	      continue;
2838 	    }
2839 	  OMP_CLAUSE_DECL (c) = build_int_cst (integer_type_node, idx);
2840 	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
2841 	      && OMP_CLAUSE_LINEAR_VARIABLE_STRIDE (c))
2842 	    {
2843 	      decl = OMP_CLAUSE_LINEAR_STEP (c);
2844 	      for (arg = parms, idx = 0; arg;
2845 		   arg = TREE_CHAIN (arg), idx++)
2846 		if (arg == decl)
2847 		  break;
2848 	      if (arg == NULL_TREE)
2849 		{
2850 		  error_at (OMP_CLAUSE_LOCATION (c),
2851 			    "%qD is not a function argument", decl);
2852 		  continue;
2853 		}
2854 	      OMP_CLAUSE_LINEAR_STEP (c)
2855 		= build_int_cst (integer_type_node, idx);
2856 	    }
2857 	}
2858       clvec.safe_push (c);
2859     }
2860   if (!clvec.is_empty ())
2861     {
2862       unsigned int len = clvec.length (), i;
2863       clvec.qsort (c_omp_declare_simd_clause_cmp);
2864       clauses = clvec[0];
2865       for (i = 0; i < len; i++)
2866 	OMP_CLAUSE_CHAIN (clvec[i]) = (i < len - 1) ? clvec[i + 1] : NULL_TREE;
2867     }
2868   else
2869     clauses = NULL_TREE;
2870   clvec.release ();
2871   return clauses;
2872 }
2873 
2874 /* Change argument indexes in CLAUSES of FNDECL back to PARM_DECLs.  */
2875 
2876 void
2877 c_omp_declare_simd_clauses_to_decls (tree fndecl, tree clauses)
2878 {
2879   tree c;
2880 
2881   for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2882     if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_SIMDLEN
2883 	&& OMP_CLAUSE_CODE (c) != OMP_CLAUSE_INBRANCH
2884 	&& OMP_CLAUSE_CODE (c) != OMP_CLAUSE_NOTINBRANCH)
2885       {
2886 	int idx = tree_to_shwi (OMP_CLAUSE_DECL (c)), i;
2887 	tree arg;
2888 	for (arg = DECL_ARGUMENTS (fndecl), i = 0; arg;
2889 	     arg = TREE_CHAIN (arg), i++)
2890 	  if (i == idx)
2891 	    break;
2892 	gcc_assert (arg);
2893 	OMP_CLAUSE_DECL (c) = arg;
2894 	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
2895 	    && OMP_CLAUSE_LINEAR_VARIABLE_STRIDE (c))
2896 	  {
2897 	    idx = tree_to_shwi (OMP_CLAUSE_LINEAR_STEP (c));
2898 	    for (arg = DECL_ARGUMENTS (fndecl), i = 0; arg;
2899 		 arg = TREE_CHAIN (arg), i++)
2900 	      if (i == idx)
2901 		break;
2902 	    gcc_assert (arg);
2903 	    OMP_CLAUSE_LINEAR_STEP (c) = arg;
2904 	  }
2905       }
2906 }
2907 
2908 /* Return true for __func__ and similar function-local predefined
2909    variables (which are predetermined shared in OpenMP and allowed in
2910    shared/firstprivate clauses).  */
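
/* For example (an illustrative sketch):

     void g (void)
     {
       const char *name;
     #pragma omp parallel default(none) private(name)
       name = __func__;
     }

   is accepted even though __func__ appears in no clause, because the
   artificial static VAR_DECL backing it is treated as predetermined
   shared via this predicate.  */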
2911 
2912 bool
2913 c_omp_predefined_variable (tree decl)
2914 {
2915   if (VAR_P (decl)
2916       && DECL_ARTIFICIAL (decl)
2917       && TREE_STATIC (decl)
2918       && DECL_NAME (decl))
2919     {
2920       if (TREE_READONLY (decl)
2921 	  && (DECL_NAME (decl) == ridpointers[RID_C99_FUNCTION_NAME]
2922 	      || DECL_NAME (decl) == ridpointers[RID_FUNCTION_NAME]
2923 	      || DECL_NAME (decl) == ridpointers[RID_PRETTY_FUNCTION_NAME]))
2924 	return true;
2925       /* For UBSan, handle variables created by ubsan_create_data the
2926 	 same way.  There is no magic flag for those, but user variables
2927 	 shouldn't be DECL_ARTIFICIAL or have TYPE_ARTIFICIAL type with
2928 	 such names.  */
2929       if ((flag_sanitize & (SANITIZE_UNDEFINED
2930 			    | SANITIZE_UNDEFINED_NONDEFAULT)) != 0
2931 	  && DECL_IGNORED_P (decl)
2932 	  && !TREE_READONLY (decl)
2933 	  && TREE_CODE (DECL_NAME (decl)) == IDENTIFIER_NODE
2934 	  && TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE
2935 	  && TYPE_ARTIFICIAL (TREE_TYPE (decl))
2936 	  && TYPE_NAME (TREE_TYPE (decl))
2937 	  && TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == TYPE_DECL
2938 	  && DECL_NAME (TYPE_NAME (TREE_TYPE (decl)))
2939 	  && (TREE_CODE (DECL_NAME (TYPE_NAME (TREE_TYPE (decl))))
2940 	      == IDENTIFIER_NODE))
2941 	{
2942 	  tree id1 = DECL_NAME (decl);
2943 	  tree id2 = DECL_NAME (TYPE_NAME (TREE_TYPE (decl)));
2944 	  if (IDENTIFIER_LENGTH (id1) >= sizeof ("ubsan_data") - 1
2945 	      && IDENTIFIER_LENGTH (id2) >= sizeof ("__ubsan__data")
2946 	      && !memcmp (IDENTIFIER_POINTER (id2), "__ubsan_",
2947 			  sizeof ("__ubsan_") - 1)
2948 	      && !memcmp (IDENTIFIER_POINTER (id2) + IDENTIFIER_LENGTH (id2)
2949 			  - sizeof ("_data") + 1, "_data",
2950 			  sizeof ("_data") - 1)
2951 	      && strstr (IDENTIFIER_POINTER (id1), "ubsan_data"))
2952 	    return true;
2953 	}
2954     }
2955   return false;
2956 }
2957 
2958 /* OMP_CLAUSE_DEFAULT_UNSPECIFIED unless OpenMP sharing attribute of DECL
2959    is predetermined.  */
2960 
2961 enum omp_clause_default_kind
2962 c_omp_predetermined_sharing (tree decl)
2963 {
2964   /* Predetermine artificial variables holding integral values; those
2965      are usually the result of gimplify_one_sizepos or SAVE_EXPR
2966      gimplification.  */
2967   if (VAR_P (decl)
2968       && DECL_ARTIFICIAL (decl)
2969       && INTEGRAL_TYPE_P (TREE_TYPE (decl)))
2970     return OMP_CLAUSE_DEFAULT_SHARED;
2971 
2972   if (c_omp_predefined_variable (decl))
2973     return OMP_CLAUSE_DEFAULT_SHARED;
2974 
2975   return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2976 }
2977 
2978 /* OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED unless OpenMP mapping attribute
2979    of DECL is predetermined.  */
2980 
2981 enum omp_clause_defaultmap_kind
2982 c_omp_predetermined_mapping (tree decl)
2983 {
2984   /* Predetermine artificial variables holding integral values; those
2985      are usually the result of gimplify_one_sizepos or SAVE_EXPR
2986      gimplification.  */
2987   if (VAR_P (decl)
2988       && DECL_ARTIFICIAL (decl)
2989       && INTEGRAL_TYPE_P (TREE_TYPE (decl)))
2990     return OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE;
2991 
2992   if (c_omp_predefined_variable (decl))
2993     return OMP_CLAUSE_DEFAULTMAP_TO;
2994 
2995   return OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED;
2996 }
2997 
2998 
2999 /* Used to merge map clause information in c_omp_adjust_map_clauses.  */
3000 struct map_clause
3001 {
3002   tree clause;
3003   bool firstprivate_ptr_p;
3004   bool decl_mapped;
3005   bool omp_declare_target;
3006   map_clause (void) : clause (NULL_TREE), firstprivate_ptr_p (false),
3007     decl_mapped (false), omp_declare_target (false) { }
3008 };
3009 
3010 /* Adjust map clauses after normal clause parsing, mainly to turn specific
3011    base-pointer map cases into attach/detach and mark them addressable.  */
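
/* A hedged sketch of the effect: assuming earlier clause handling has
   produced a GOMP_MAP_FIRSTPRIVATE_POINTER map for the base pointer of
   an array section, as in

     int *p;
     #pragma omp target map(tofrom: p[0:n]) map(p)

   the firstprivate-pointer map of p is rewritten to
   GOMP_MAP_ATTACH_DETACH, because p is itself mapped on the same
   construct (case (1) below; "declare target" variables are case (2)),
   and p is marked addressable so its address can be taken for the
   attach operation.  */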
3012 void
3013 c_omp_adjust_map_clauses (tree clauses, bool is_target)
3014 {
3015   if (!is_target)
3016     {
3017       /* If this is not a target construct, just turn firstprivate pointers
3018 	 into attach/detach; the runtime will check and do the rest.  */
3019 
3020       for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
3021 	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
3022 	    && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
3023 	    && DECL_P (OMP_CLAUSE_DECL (c))
3024 	    && POINTER_TYPE_P (TREE_TYPE (OMP_CLAUSE_DECL (c))))
3025 	  {
3026 	    tree ptr = OMP_CLAUSE_DECL (c);
3027 	    OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ATTACH_DETACH);
3028 	    c_common_mark_addressable_vec (ptr);
3029 	  }
3030       return;
3031     }
3032 
3033   hash_map<tree, map_clause> maps;
3034 
3035   for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
3036     if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
3037 	&& DECL_P (OMP_CLAUSE_DECL (c)))
3038       {
3039 	/* If this is for a target construct, the firstprivate pointer
3040 	   is changed to attach/detach if either is true:
3041 	   (1) the base-pointer is mapped in this same construct, or
3042 	   (2) the base-pointer is a variable placed on the device by
3043 	       "declare target" directives.
3044 
3045 	   Here we iterate through all map clauses collecting these cases,
3046 	   and merge them with a hash_map to process below.  */
3047 
3048 	if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
3049 	    && POINTER_TYPE_P (TREE_TYPE (OMP_CLAUSE_DECL (c))))
3050 	  {
3051 	    tree ptr = OMP_CLAUSE_DECL (c);
3052 	    map_clause &mc = maps.get_or_insert (ptr);
3053 	    if (mc.clause == NULL_TREE)
3054 	      mc.clause = c;
3055 	    mc.firstprivate_ptr_p = true;
3056 
3057 	    if (is_global_var (ptr)
3058 		&& lookup_attribute ("omp declare target",
3059 				     DECL_ATTRIBUTES (ptr)))
3060 	      mc.omp_declare_target = true;
3061 	  }
3062 	else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALLOC
3063 		 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TO
3064 		 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FROM
3065 		 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TOFROM
3066 		 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
3067 		 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_FROM
3068 		 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
3069 	  {
3070 	    map_clause &mc = maps.get_or_insert (OMP_CLAUSE_DECL (c));
3071 	    mc.decl_mapped = true;
3072 	  }
3073       }
3074 
3075   for (hash_map<tree, map_clause>::iterator i = maps.begin ();
3076        i != maps.end (); ++i)
3077     {
3078       map_clause &mc = (*i).second;
3079 
3080       if (mc.firstprivate_ptr_p
3081 	  && (mc.decl_mapped || mc.omp_declare_target))
3082 	{
3083 	  OMP_CLAUSE_SET_MAP_KIND (mc.clause, GOMP_MAP_ATTACH_DETACH);
3084 	  c_common_mark_addressable_vec (OMP_CLAUSE_DECL (mc.clause));
3085 	}
3086     }
3087 }
3088 
3089 static const struct c_omp_directive omp_directives[] = {
3090   /* Keep this alphabetically sorted by the first word.  Non-null second/third
3091      if any should precede null ones.  */
3092   { "allocate", nullptr, nullptr, PRAGMA_OMP_ALLOCATE,
3093     C_OMP_DIR_DECLARATIVE, false },
3094   /* { "assume", nullptr, nullptr, PRAGMA_OMP_ASSUME,
3095     C_OMP_DIR_INFORMATIONAL, false }, */
3096   /* { "assumes", nullptr, nullptr, PRAGMA_OMP_ASSUMES,
3097     C_OMP_DIR_INFORMATIONAL, false }, */
3098   { "atomic", nullptr, nullptr, PRAGMA_OMP_ATOMIC,
3099     C_OMP_DIR_CONSTRUCT, false },
3100   { "barrier", nullptr, nullptr, PRAGMA_OMP_BARRIER,
3101     C_OMP_DIR_STANDALONE, false },
3102   /* { "begin", "assumes", nullptr, PRAGMA_OMP_BEGIN,
3103     C_OMP_DIR_INFORMATIONAL, false }, */
3104   /* { "begin", "declare", "target", PRAGMA_OMP_BEGIN,
3105     C_OMP_DIR_DECLARATIVE, false }, */
3106   /* { "begin", "declare", "variant", PRAGMA_OMP_BEGIN,
3107     C_OMP_DIR_DECLARATIVE, false }, */
3108   /* { "begin", "metadirective", nullptr, PRAGMA_OMP_BEGIN,
3109     C_OMP_DIR_???, ??? },  */
3110   { "cancel", nullptr, nullptr, PRAGMA_OMP_CANCEL,
3111     C_OMP_DIR_STANDALONE, false },
3112   { "cancellation", "point", nullptr, PRAGMA_OMP_CANCELLATION_POINT,
3113     C_OMP_DIR_STANDALONE, false },
3114   { "critical", nullptr, nullptr, PRAGMA_OMP_CRITICAL,
3115     C_OMP_DIR_CONSTRUCT, false },
3116   /* { "declare", "mapper", nullptr, PRAGMA_OMP_DECLARE,
3117     C_OMP_DIR_DECLARATIVE, false },  */
3118   { "declare", "reduction", nullptr, PRAGMA_OMP_DECLARE,
3119     C_OMP_DIR_DECLARATIVE, true },
3120   { "declare", "simd", nullptr, PRAGMA_OMP_DECLARE,
3121     C_OMP_DIR_DECLARATIVE, true },
3122   { "declare", "target", nullptr, PRAGMA_OMP_DECLARE,
3123     C_OMP_DIR_DECLARATIVE, false },
3124   { "declare", "variant", nullptr, PRAGMA_OMP_DECLARE,
3125     C_OMP_DIR_DECLARATIVE, false },
3126   { "depobj", nullptr, nullptr, PRAGMA_OMP_DEPOBJ,
3127     C_OMP_DIR_STANDALONE, false },
3128   /* { "dispatch", nullptr, nullptr, PRAGMA_OMP_DISPATCH,
3129     C_OMP_DIR_CONSTRUCT, false },  */
3130   { "distribute", nullptr, nullptr, PRAGMA_OMP_DISTRIBUTE,
3131     C_OMP_DIR_CONSTRUCT, true },
3132   /* { "end", "assumes", nullptr, PRAGMA_OMP_END,
3133     C_OMP_DIR_INFORMATIONAL, false }, */
3134   { "end", "declare", "target", PRAGMA_OMP_END_DECLARE_TARGET,
3135     C_OMP_DIR_DECLARATIVE, false },
3136   /* { "end", "declare", "variant", PRAGMA_OMP_END,
3137     C_OMP_DIR_DECLARATIVE, false }, */
3138   /* { "end", "metadirective", nullptr, PRAGMA_OMP_END,
3139     C_OMP_DIR_???, ??? },  */
3140   /* error with at(execution) is C_OMP_DIR_STANDALONE.  */
3141   { "error", nullptr, nullptr, PRAGMA_OMP_ERROR,
3142     C_OMP_DIR_UTILITY, false },
3143   { "flush", nullptr, nullptr, PRAGMA_OMP_FLUSH,
3144     C_OMP_DIR_STANDALONE, false },
3145   { "for", nullptr, nullptr, PRAGMA_OMP_FOR,
3146     C_OMP_DIR_CONSTRUCT, true },
3147   /* { "interop", nullptr, nullptr, PRAGMA_OMP_INTEROP,
3148     C_OMP_DIR_STANDALONE, false },  */
3149   { "loop", nullptr, nullptr, PRAGMA_OMP_LOOP,
3150     C_OMP_DIR_CONSTRUCT, true },
3151   { "masked", nullptr, nullptr, PRAGMA_OMP_MASKED,
3152     C_OMP_DIR_CONSTRUCT, true },
3153   { "master", nullptr, nullptr, PRAGMA_OMP_MASTER,
3154     C_OMP_DIR_CONSTRUCT, true },
3155   /* { "metadirective", nullptr, nullptr, PRAGMA_OMP_METADIRECTIVE,
3156     C_OMP_DIR_???, ??? },  */
3157   { "nothing", nullptr, nullptr, PRAGMA_OMP_NOTHING,
3158     C_OMP_DIR_UTILITY, false },
3159   /* ordered with depend clause is C_OMP_DIR_STANDALONE.  */
3160   { "ordered", nullptr, nullptr, PRAGMA_OMP_ORDERED,
3161     C_OMP_DIR_CONSTRUCT, true },
3162   { "parallel", nullptr, nullptr, PRAGMA_OMP_PARALLEL,
3163     C_OMP_DIR_CONSTRUCT, true },
3164   { "requires", nullptr, nullptr, PRAGMA_OMP_REQUIRES,
3165     C_OMP_DIR_INFORMATIONAL, false },
3166   { "scan", nullptr, nullptr, PRAGMA_OMP_SCAN,
3167     C_OMP_DIR_CONSTRUCT, true },
3168   { "scope", nullptr, nullptr, PRAGMA_OMP_SCOPE,
3169     C_OMP_DIR_CONSTRUCT, false },
3170   { "section", nullptr, nullptr, PRAGMA_OMP_SECTION,
3171     C_OMP_DIR_CONSTRUCT, false },
3172   { "sections", nullptr, nullptr, PRAGMA_OMP_SECTIONS,
3173     C_OMP_DIR_CONSTRUCT, false },
3174   { "simd", nullptr, nullptr, PRAGMA_OMP_SIMD,
3175     C_OMP_DIR_CONSTRUCT, true },
3176   { "single", nullptr, nullptr, PRAGMA_OMP_SINGLE,
3177     C_OMP_DIR_CONSTRUCT, false },
3178   { "target", "data", nullptr, PRAGMA_OMP_TARGET,
3179     C_OMP_DIR_CONSTRUCT, false },
3180   { "target", "enter", "data", PRAGMA_OMP_TARGET,
3181     C_OMP_DIR_STANDALONE, false },
3182   { "target", "exit", "data", PRAGMA_OMP_TARGET,
3183     C_OMP_DIR_STANDALONE, false },
3184   { "target", "update", nullptr, PRAGMA_OMP_TARGET,
3185     C_OMP_DIR_STANDALONE, false },
3186   { "target", nullptr, nullptr, PRAGMA_OMP_TARGET,
3187     C_OMP_DIR_CONSTRUCT, true },
3188   { "task", nullptr, nullptr, PRAGMA_OMP_TASK,
3189     C_OMP_DIR_CONSTRUCT, false },
3190   { "taskgroup", nullptr, nullptr, PRAGMA_OMP_TASKGROUP,
3191     C_OMP_DIR_CONSTRUCT, false },
3192   { "taskloop", nullptr, nullptr, PRAGMA_OMP_TASKLOOP,
3193     C_OMP_DIR_CONSTRUCT, true },
3194   { "taskwait", nullptr, nullptr, PRAGMA_OMP_TASKWAIT,
3195     C_OMP_DIR_STANDALONE, false },
3196   { "taskyield", nullptr, nullptr, PRAGMA_OMP_TASKYIELD,
3197     C_OMP_DIR_STANDALONE, false },
3198   /* { "tile", nullptr, nullptr, PRAGMA_OMP_TILE,
3199     C_OMP_DIR_CONSTRUCT, false },  */
3200   { "teams", nullptr, nullptr, PRAGMA_OMP_TEAMS,
3201     C_OMP_DIR_CONSTRUCT, true },
3202   { "threadprivate", nullptr, nullptr, PRAGMA_OMP_THREADPRIVATE,
3203     C_OMP_DIR_DECLARATIVE, false }
3204   /* { "unroll", nullptr, nullptr, PRAGMA_OMP_UNROLL,
3205     C_OMP_DIR_CONSTRUCT, false },  */
3206 };
3207 
3208 /* Find the (non-combined/composite) OpenMP directive (if any) that starts
3209    with the FIRST keyword and, for multi-word directives, has the SECOND and
3210    THIRD keywords after it.  */
3211 
3212 const struct c_omp_directive *
3213 c_omp_categorize_directive (const char *first, const char *second,
3214 			    const char *third)
3215 {
3216   const size_t n_omp_directives = ARRAY_SIZE (omp_directives);
3217   for (size_t i = 0; i < n_omp_directives; i++)
3218     {
3219       if ((unsigned char) omp_directives[i].first[0]
3220 	  < (unsigned char) first[0])
3221 	continue;
3222       if ((unsigned char) omp_directives[i].first[0]
3223 	  > (unsigned char) first[0])
3224 	break;
3225       if (strcmp (omp_directives[i].first, first))
3226 	continue;
3227       if (!omp_directives[i].second)
3228 	return &omp_directives[i];
3229       if (!second || strcmp (omp_directives[i].second, second))
3230 	continue;
3231       if (!omp_directives[i].third)
3232 	return &omp_directives[i];
3233       if (!third || strcmp (omp_directives[i].third, third))
3234 	continue;
3235       return &omp_directives[i];
3236     }
3237   return NULL;
3238 }
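
/* Usage sketch (illustrative only; the caller shown here is hypothetical,
   not code from this file): a parser that has consumed the tokens of
   "#pragma omp target enter data" could categorize the directive as

     const struct c_omp_directive *dir
       = c_omp_categorize_directive ("target", "enter", "data");

   which, per the table above, yields the entry with id PRAGMA_OMP_TARGET
   and kind C_OMP_DIR_STANDALONE, whereas
   c_omp_categorize_directive ("target", NULL, NULL) falls through to the
   single-word "target" construct entry.  The early continue/break on the
   first character relies on the table staying sorted alphabetically by
   the first word, as noted in the comment at its head.  */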
3239