1 /* Perform the semantic phase of parsing, i.e., the process of
2 building tree structure, checking semantic consistency, and
3 building RTL. These routines are used both during actual parsing
4 and during the instantiation of template functions.
5
6 Copyright (C) 1998-2022 Free Software Foundation, Inc.
7 Written by Mark Mitchell (mmitchell@usa.net) based on code found
8 formerly in parse.y and pt.cc.
9
10 This file is part of GCC.
11
12 GCC is free software; you can redistribute it and/or modify it
13 under the terms of the GNU General Public License as published by
14 the Free Software Foundation; either version 3, or (at your option)
15 any later version.
16
17 GCC is distributed in the hope that it will be useful, but
18 WITHOUT ANY WARRANTY; without even the implied warranty of
19 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 General Public License for more details.
21
22 You should have received a copy of the GNU General Public License
23 along with GCC; see the file COPYING3. If not see
24 <http://www.gnu.org/licenses/>. */
25
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "target.h"
30 #include "bitmap.h"
31 #include "cp-tree.h"
32 #include "stringpool.h"
33 #include "cgraph.h"
34 #include "stmt.h"
35 #include "varasm.h"
36 #include "stor-layout.h"
37 #include "c-family/c-objc.h"
38 #include "tree-inline.h"
39 #include "intl.h"
40 #include "tree-iterator.h"
41 #include "omp-general.h"
42 #include "convert.h"
43 #include "stringpool.h"
44 #include "attribs.h"
45 #include "gomp-constants.h"
46 #include "predict.h"
47 #include "memmodel.h"
48
/* These routines provide a modular interface to perform many parsing
   operations.  They may therefore be used during actual parsing, or
   during template instantiation, which may be regarded as a
   degenerate form of parsing.  */
53
54 static tree maybe_convert_cond (tree);
55 static tree finalize_nrv_r (tree *, int *, void *);
56 static tree capture_decltype (tree);
57
58 /* Used for OpenMP non-static data member privatization. */
59
60 static hash_map<tree, tree> *omp_private_member_map;
61 static vec<tree> omp_private_member_vec;
62 static bool omp_private_member_ignore_next;
63
64
65 /* Deferred Access Checking Overview
66 ---------------------------------
67
68 Most C++ expressions and declarations require access checking
69 to be performed during parsing. However, in several cases,
70 this has to be treated differently.
71
72 For member declarations, access checking has to be deferred
73 until more information about the declaration is known. For
74 example:
75
76 class A {
77 typedef int X;
78 public:
79 X f();
80 };
81
82 A::X A::f();
83 A::X g();
84
85 When we are parsing the function return type `A::X', we don't
86 really know if this is allowed until we parse the function name.
87
88 Furthermore, some contexts require that access checking is
89 never performed at all. These include class heads, and template
90 instantiations.
91
92 Typical use of access checking functions is described here:
93
94 1. When we enter a context that requires certain access checking
95 mode, the function `push_deferring_access_checks' is called with
96 DEFERRING argument specifying the desired mode. Access checking
97 may be performed immediately (dk_no_deferred), deferred
98 (dk_deferred), or not performed (dk_no_check).
99
   2. When a declaration, such as a type or a variable, is encountered,
   the function `perform_or_defer_access_check' is called.  It
   maintains a vector of all deferred checks.
103
   3. The global `current_class_type' or `current_function_decl' is then
   set up by the parser.  `enforce_access' relies on this information
   to check access.
107
108 4. Upon exiting the context mentioned in step 1,
   `perform_deferred_access_checks' is called to check all declarations
   stored in the vector.  `pop_deferring_access_checks' is then
111 called to restore the previous access checking mode.
112
   In case of a parse error, we simply call `pop_deferring_access_checks'
   without `perform_deferred_access_checks'.  */
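/* A minimal sketch of that sequence, using the entry points defined
   below (illustrative only; the parser drives this from many places):

     push_deferring_access_checks (dk_deferred);
     ... parse the decl-specifier-seq and declarator ...
     perform_deferred_access_checks (tf_warning_or_error);
     pop_deferring_access_checks ();

   On a parse error the call to perform_deferred_access_checks is
   simply skipped and pop_deferring_access_checks discards the
   pending checks.  */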
115
116 struct GTY(()) deferred_access {
117 /* A vector representing name-lookups for which we have deferred
118 checking access controls. We cannot check the accessibility of
119 names used in a decl-specifier-seq until we know what is being
120 declared because code like:
121
122 class A {
123 class B {};
124 B* f();
125 }
126
127 A::B* A::f() { return 0; }
128
129 is valid, even though `A::B' is not generally accessible. */
130 vec<deferred_access_check, va_gc> *deferred_access_checks;
131
132 /* The current mode of access checks. */
133 enum deferring_kind deferring_access_checks_kind;
134 };
135
136 /* Data for deferred access checking. */
137 static GTY(()) vec<deferred_access, va_gc> *deferred_access_stack;
138 static GTY(()) unsigned deferred_access_no_check;
139
140 /* Save the current deferred access states and start deferred
141 access checking iff DEFER_P is true. */
142
143 void
push_deferring_access_checks (deferring_kind deferring)
145 {
  /* For contexts like template instantiation, disabling access
     checking applies to all nested contexts.  */
148 if (deferred_access_no_check || deferring == dk_no_check)
149 deferred_access_no_check++;
150 else
151 {
152 deferred_access e = {NULL, deferring};
153 vec_safe_push (deferred_access_stack, e);
154 }
155 }
156
157 /* Save the current deferred access states and start deferred access
158 checking, continuing the set of deferred checks in CHECKS. */
159
160 void
reopen_deferring_access_checks (vec<deferred_access_check, va_gc> * checks)
162 {
163 push_deferring_access_checks (dk_deferred);
164 if (!deferred_access_no_check)
165 deferred_access_stack->last().deferred_access_checks = checks;
166 }
167
168 /* Resume deferring access checks again after we stopped doing
169 this previously. */
170
171 void
resume_deferring_access_checks (void)
173 {
174 if (!deferred_access_no_check)
175 deferred_access_stack->last().deferring_access_checks_kind = dk_deferred;
176 }
177
178 /* Stop deferring access checks. */
179
180 void
stop_deferring_access_checks (void)
182 {
183 if (!deferred_access_no_check)
184 deferred_access_stack->last().deferring_access_checks_kind = dk_no_deferred;
185 }
186
187 /* Discard the current deferred access checks and restore the
188 previous states. */
189
190 void
pop_deferring_access_checks (void)
192 {
193 if (deferred_access_no_check)
194 deferred_access_no_check--;
195 else
196 deferred_access_stack->pop ();
197 }
198
/* Returns the vector of deferred access checks for the current
   context, or NULL if access checking is currently disabled.  Each
   element records the scope (BINFO) through which the access occurred
   and the declaration named.  */
203
204 vec<deferred_access_check, va_gc> *
get_deferred_access_checks (void)
206 {
207 if (deferred_access_no_check)
208 return NULL;
209 else
210 return (deferred_access_stack->last().deferred_access_checks);
211 }
212
/* Take the current deferred checks and combine them with the parent
   context's if the parent is also deferring checks; otherwise perform
   the checks now.  */
216
217 void
pop_to_parent_deferring_access_checks (void)
219 {
220 if (deferred_access_no_check)
221 deferred_access_no_check--;
222 else
223 {
224 vec<deferred_access_check, va_gc> *checks;
225 deferred_access *ptr;
226
227 checks = (deferred_access_stack->last ().deferred_access_checks);
228
229 deferred_access_stack->pop ();
230 ptr = &deferred_access_stack->last ();
231 if (ptr->deferring_access_checks_kind == dk_no_deferred)
232 {
233 /* Check access. */
234 perform_access_checks (checks, tf_warning_or_error);
235 }
236 else
237 {
238 /* Merge with parent. */
239 int i, j;
240 deferred_access_check *chk, *probe;
241
242 FOR_EACH_VEC_SAFE_ELT (checks, i, chk)
243 {
244 FOR_EACH_VEC_SAFE_ELT (ptr->deferred_access_checks, j, probe)
245 {
246 if (probe->binfo == chk->binfo &&
247 probe->decl == chk->decl &&
248 probe->diag_decl == chk->diag_decl)
249 goto found;
250 }
251 /* Insert into parent's checks. */
252 vec_safe_push (ptr->deferred_access_checks, *chk);
253 found:;
254 }
255 }
256 }
257 }
258
259 /* Called from enforce_access. A class has attempted (but failed) to access
260 DECL. It is already established that a baseclass of that class,
261 PARENT_BINFO, has private access to DECL. Examine certain special cases
262 to find a decl that accurately describes the source of the problem. If
263 none of the special cases apply, simply return DECL as the source of the
264 problem. */
265
266 static tree
get_class_access_diagnostic_decl (tree parent_binfo, tree decl)
268 {
269 /* When a class is denied access to a decl in a baseclass, most of the
270 time it is because the decl itself was declared as private at the point
271 of declaration.
272
273 However, in C++, there are (at least) two situations in which a decl
274 can be private even though it was not originally defined as such.
275 These two situations only apply if a baseclass had private access to
276 DECL (this function is only called if that is the case). */
277
278 /* We should first check whether the reason the parent had private access
279 to DECL was simply because DECL was created and declared as private in
280 the parent. If it was, then DECL is definitively the source of the
281 problem. */
282 if (SAME_BINFO_TYPE_P (context_for_name_lookup (decl),
283 BINFO_TYPE (parent_binfo)))
284 return decl;
285
286 /* 1. If the "using" keyword is used to inherit DECL within the parent,
287 this may cause DECL to be private, so we should return the using
288 statement as the source of the problem.
289
290 Scan the fields of PARENT_BINFO and see if there are any using decls. If
291 there are, see if they inherit DECL. If they do, that's where DECL must
292 have been declared private. */
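  /* For example (an illustrative sketch, not taken from the testsuite):

       struct B { void f (); };
       struct P : B { private: using B::f; };
       struct D : P { void g () { f (); } };   // error: P's using-decl is private

     Here the private using-declaration in P, rather than B::f itself,
     is what makes the name inaccessible in D.  */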
293
294 for (tree parent_field = TYPE_FIELDS (BINFO_TYPE (parent_binfo));
295 parent_field;
296 parent_field = DECL_CHAIN (parent_field))
    /* Not strictly necessary, but also check TREE_PRIVATE to eliminate
       obviously irrelevant using decls.  */
299 if (TREE_CODE (parent_field) == USING_DECL
300 && TREE_PRIVATE (parent_field))
301 {
302 tree decl_stripped = strip_using_decl (parent_field);
303
304 /* The using statement might be overloaded. If so, we need to
305 check all of the overloads. */
306 for (ovl_iterator iter (decl_stripped); iter; ++iter)
307 /* If equal, the using statement inherits DECL, and so is the
308 source of the access failure, so return it. */
309 if (*iter == decl)
310 return parent_field;
311 }
312
313 /* 2. If DECL was privately inherited by the parent class, then DECL will
314 be inaccessible, even though it may originally have been accessible to
315 deriving classes. In that case, the fault lies with the parent, since it
316 used a private inheritance, so we return the parent as the source of the
317 problem.
318
319 Since this is the last check, we just assume it's true. At worst, it
320 will simply point to the class that failed to give access, which is
321 technically true. */
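  /* For example (an illustrative sketch):

       struct B { public: void f (); };
       struct P : private B { };
       struct D : P { void g () { f (); } };   // error: B is a private base of P

     Blaming P, which chose the private inheritance, points closer to
     the real problem than blaming B::f.  */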
322 return TYPE_NAME (BINFO_TYPE (parent_binfo));
323 }
324
325 /* If the current scope isn't allowed to access DECL along
326 BASETYPE_PATH, give an error, or if we're parsing a function or class
327 template, defer the access check to be performed at instantiation time.
328 The most derived class in BASETYPE_PATH is the one used to qualify DECL.
329 DIAG_DECL is the declaration to use in the error diagnostic. */
330
331 static bool
enforce_access (tree basetype_path, tree decl, tree diag_decl,
		tsubst_flags_t complain, access_failure_info *afi = NULL)
334 {
335 gcc_assert (TREE_CODE (basetype_path) == TREE_BINFO);
336
337 if (flag_new_inheriting_ctors
338 && DECL_INHERITED_CTOR (decl))
339 {
340 /* 7.3.3/18: The additional constructors are accessible if they would be
341 accessible when used to construct an object of the corresponding base
342 class. */
343 decl = strip_inheriting_ctors (decl);
344 basetype_path = lookup_base (basetype_path, DECL_CONTEXT (decl),
345 ba_any, NULL, complain);
346 }
347
348 tree cs = current_scope ();
349 if (processing_template_decl
350 && (CLASS_TYPE_P (cs) || TREE_CODE (cs) == FUNCTION_DECL))
351 if (tree template_info = get_template_info (cs))
352 {
353 /* When parsing a function or class template, we in general need to
354 defer access checks until template instantiation time, since a friend
355 declaration may grant access only to a particular specialization of
356 the template. */
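      /* For example (a rough sketch):

	   class C;
	   template<typename T> struct Probe { static int get (C *); };
	   class C { int n; friend struct Probe<int>; };
	   template<typename T> int Probe<T>::get (C *p) { return p->n; }

	 Whether Probe<T>::get may read C::n depends on which
	 specialization is instantiated, so the check cannot be decided
	 while merely parsing the template.  */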
357
358 if (accessible_p (basetype_path, decl, /*consider_local_p=*/true))
359 /* But if the member is deemed accessible at parse time, then we can
360 assume it'll be accessible at instantiation time. */
361 return true;
362
363 /* Access of a dependent decl should be rechecked after tsubst'ing
364 into the user of the decl, rather than explicitly deferring the
365 check here. */
366 gcc_assert (!uses_template_parms (decl));
367 if (TREE_CODE (decl) == FIELD_DECL)
368 gcc_assert (!uses_template_parms (DECL_CONTEXT (decl)));
369
370 /* Defer this access check until instantiation time. */
371 deferred_access_check access_check;
372 access_check.binfo = basetype_path;
373 access_check.decl = decl;
374 access_check.diag_decl = diag_decl;
375 access_check.loc = input_location;
376 vec_safe_push (TI_DEFERRED_ACCESS_CHECKS (template_info), access_check);
377 return true;
378 }
379
380 if (!accessible_p (basetype_path, decl, /*consider_local_p=*/true))
381 {
382 if (flag_new_inheriting_ctors)
383 diag_decl = strip_inheriting_ctors (diag_decl);
384 if (complain & tf_error)
385 {
386 access_kind access_failure_reason = ak_none;
387
388 /* By default, using the decl as the source of the problem will
389 usually give correct results. */
390 tree diag_location = diag_decl;
391
392 /* However, if a parent of BASETYPE_PATH had private access to decl,
393 then it actually might be the case that the source of the problem
394 is not DECL. */
395 tree parent_binfo = get_parent_with_private_access (decl,
396 basetype_path);
397
398 /* So if a parent did have private access, then we need to do
399 special checks to obtain the best diagnostic location decl. */
400 if (parent_binfo != NULL_TREE)
401 {
402 diag_location = get_class_access_diagnostic_decl (parent_binfo,
403 diag_decl);
404
405 /* We also at this point know that the reason access failed was
406 because decl was private. */
407 access_failure_reason = ak_private;
408 }
409
410 /* Finally, generate an error message. */
411 complain_about_access (decl, diag_decl, diag_location, true,
412 access_failure_reason);
413 }
414 if (afi)
415 afi->record_access_failure (basetype_path, decl, diag_decl);
416 return false;
417 }
418
419 return true;
420 }
421
/* Perform the access checks in CHECKS.  Each check records the BINFO
   indicating the qualifying scope used to access the DECL it names.
   If CHECKS is empty, or we aren't in an SFINAE context, or all the
   checks succeed, return TRUE; otherwise FALSE.  */
427
428 bool
perform_access_checks (vec<deferred_access_check, va_gc> *checks,
		       tsubst_flags_t complain)
431 {
432 int i;
433 deferred_access_check *chk;
434 location_t loc = input_location;
435 bool ok = true;
436
437 if (!checks)
438 return true;
439
440 FOR_EACH_VEC_SAFE_ELT (checks, i, chk)
441 {
442 input_location = chk->loc;
443 ok &= enforce_access (chk->binfo, chk->decl, chk->diag_decl, complain);
444 }
445
446 input_location = loc;
447 return (complain & tf_error) ? true : ok;
448 }
449
450 /* Perform the deferred access checks.
451
452 After performing the checks, we still have to keep the list
453 `deferred_access_stack->deferred_access_checks' since we may want
454 to check access for them again later in a different context.
455 For example:
456
457 class A {
458 typedef int X;
459 static X a;
460 };
461 A::X A::a, x; // No error for `A::a', error for `x'
462
463 We have to perform deferred access of `A::X', first with `A::a',
464 next with `x'. Return value like perform_access_checks above. */
465
466 bool
perform_deferred_access_checks (tsubst_flags_t complain)
468 {
469 return perform_access_checks (get_deferred_access_checks (), complain);
470 }
471
472 /* Defer checking the accessibility of DECL, when looked up in
473 BINFO. DIAG_DECL is the declaration to use to print diagnostics.
474 Return value like perform_access_checks above.
475 If non-NULL, report failures to AFI. */
476
477 bool
perform_or_defer_access_check (tree binfo, tree decl, tree diag_decl,
			       tsubst_flags_t complain,
			       access_failure_info *afi)
481 {
482 int i;
483 deferred_access *ptr;
484 deferred_access_check *chk;
485
  /* Exit if we are in a context in which no access checking is performed.  */
487 if (deferred_access_no_check)
488 return true;
489
490 gcc_assert (TREE_CODE (binfo) == TREE_BINFO);
491
492 ptr = &deferred_access_stack->last ();
493
494 /* If we are not supposed to defer access checks, just check now. */
495 if (ptr->deferring_access_checks_kind == dk_no_deferred)
496 {
497 bool ok = enforce_access (binfo, decl, diag_decl, complain, afi);
498 return (complain & tf_error) ? true : ok;
499 }
500
501 /* See if we are already going to perform this check. */
502 FOR_EACH_VEC_SAFE_ELT (ptr->deferred_access_checks, i, chk)
503 {
504 if (chk->decl == decl && chk->binfo == binfo &&
505 chk->diag_decl == diag_decl)
506 {
507 return true;
508 }
509 }
510 /* If not, record the check. */
511 deferred_access_check new_access = {binfo, decl, diag_decl, input_location};
512 vec_safe_push (ptr->deferred_access_checks, new_access);
513
514 return true;
515 }
516
517 /* Returns nonzero if the current statement is a full expression,
518 i.e. temporaries created during that statement should be destroyed
519 at the end of the statement. */
520
521 int
stmts_are_full_exprs_p (void)
523 {
524 return current_stmt_tree ()->stmts_are_full_exprs_p;
525 }
526
527 /* T is a statement. Add it to the statement-tree. This is the C++
528 version. The C/ObjC frontends have a slightly different version of
529 this function. */
530
531 tree
add_stmt (tree t)
533 {
534 enum tree_code code = TREE_CODE (t);
535
536 if (EXPR_P (t) && code != LABEL_EXPR)
537 {
538 if (!EXPR_HAS_LOCATION (t))
539 SET_EXPR_LOCATION (t, input_location);
540
541 /* When we expand a statement-tree, we must know whether or not the
542 statements are full-expressions. We record that fact here. */
543 if (STATEMENT_CODE_P (TREE_CODE (t)))
544 STMT_IS_FULL_EXPR_P (t) = stmts_are_full_exprs_p ();
545 }
546
547 if (code == LABEL_EXPR || code == CASE_LABEL_EXPR)
548 STATEMENT_LIST_HAS_LABEL (cur_stmt_list) = 1;
549
550 /* Add T to the statement-tree. Non-side-effect statements need to be
551 recorded during statement expressions. */
552 gcc_checking_assert (!stmt_list_stack->is_empty ());
553 append_to_statement_list_force (t, &cur_stmt_list);
554
555 return t;
556 }
557
558 /* Returns the stmt_tree to which statements are currently being added. */
559
560 stmt_tree
current_stmt_tree (void)
562 {
563 return (cfun
564 ? &cfun->language->base.x_stmt_tree
565 : &scope_chain->x_stmt_tree);
566 }
567
568 /* If statements are full expressions, wrap STMT in a CLEANUP_POINT_EXPR. */
569
570 static tree
maybe_cleanup_point_expr (tree expr)
572 {
573 if (!processing_template_decl && stmts_are_full_exprs_p ())
574 expr = fold_build_cleanup_point_expr (TREE_TYPE (expr), expr);
575 return expr;
576 }
577
/* Like maybe_cleanup_point_expr, except give the new expression type void so
   we don't need to create a temporary variable to hold the inner expression.
   We do this because the original type might be an aggregate, and we cannot
   create a temporary variable for such a type.  */
582
583 tree
maybe_cleanup_point_expr_void (tree expr)
585 {
586 if (!processing_template_decl && stmts_are_full_exprs_p ())
587 expr = fold_build_cleanup_point_expr (void_type_node, expr);
588 return expr;
589 }
590
591
592
593 /* Create a declaration statement for the declaration given by the DECL. */
594
595 void
add_decl_expr (tree decl)
597 {
598 tree r = build_stmt (DECL_SOURCE_LOCATION (decl), DECL_EXPR, decl);
599 if (DECL_INITIAL (decl)
600 || (DECL_SIZE (decl) && TREE_SIDE_EFFECTS (DECL_SIZE (decl))))
601 r = maybe_cleanup_point_expr_void (r);
602 add_stmt (r);
603 }
604
605 /* Set EXPR_LOCATION of the cleanups of any CLEANUP_STMT in STMTS to LOC. */
606
607 static void
set_cleanup_locs (tree stmts, location_t loc)
609 {
610 if (TREE_CODE (stmts) == CLEANUP_STMT)
611 {
612 tree t = CLEANUP_EXPR (stmts);
613 protected_set_expr_location (t, loc);
614 /* Avoid locus differences for C++ cdtor calls depending on whether
615 cdtor_returns_this: a conversion to void is added to discard the return
616 value, and this conversion ends up carrying the location, and when it
617 gets discarded, the location is lost. So hold it in the call as
618 well. */
619 if (TREE_CODE (t) == NOP_EXPR
620 && TREE_TYPE (t) == void_type_node
621 && TREE_CODE (TREE_OPERAND (t, 0)) == CALL_EXPR)
622 protected_set_expr_location (TREE_OPERAND (t, 0), loc);
623 set_cleanup_locs (CLEANUP_BODY (stmts), loc);
624 }
625 else if (TREE_CODE (stmts) == STATEMENT_LIST)
626 for (tree stmt : tsi_range (stmts))
627 set_cleanup_locs (stmt, loc);
628 }
629
630 /* True iff the innermost block scope is a try block. */
631
632 static bool
at_try_scope ()
634 {
635 cp_binding_level *b = current_binding_level;
636 while (b && b->kind == sk_cleanup)
637 b = b->level_chain;
638 return b && b->kind == sk_try;
639 }
640
641 /* Finish a scope. */
642
643 tree
do_poplevel (tree stmt_list)
645 {
646 tree block = NULL;
647
648 bool was_try = at_try_scope ();
649
650 if (stmts_are_full_exprs_p ())
651 block = poplevel (kept_level_p (), 1, 0);
652
653 /* This needs to come after poplevel merges sk_cleanup statement_lists. */
654 maybe_splice_retval_cleanup (stmt_list, was_try);
655
656 stmt_list = pop_stmt_list (stmt_list);
657
658 /* input_location is the last token of the scope, usually a }. */
659 set_cleanup_locs (stmt_list, input_location);
660
661 if (!processing_template_decl)
662 {
663 stmt_list = c_build_bind_expr (input_location, block, stmt_list);
664 /* ??? See c_end_compound_stmt re statement expressions. */
665 }
666
667 return stmt_list;
668 }
669
670 /* Begin a new scope. */
671
672 static tree
do_pushlevel (scope_kind sk)
674 {
675 tree ret = push_stmt_list ();
676 if (stmts_are_full_exprs_p ())
677 begin_scope (sk, NULL);
678 return ret;
679 }
680
681 /* Queue a cleanup. CLEANUP is an expression/statement to be executed
682 when the current scope is exited. EH_ONLY is true when this is not
683 meant to apply to normal control flow transfer. DECL is the VAR_DECL
684 being cleaned up, if any, or null for temporaries or subobjects. */
685
686 void
push_cleanup (tree decl, tree cleanup, bool eh_only)
688 {
689 tree stmt = build_stmt (input_location, CLEANUP_STMT, NULL, cleanup, decl);
690 CLEANUP_EH_ONLY (stmt) = eh_only;
691 add_stmt (stmt);
692 CLEANUP_BODY (stmt) = push_stmt_list ();
693 }
694
695 /* Simple infinite loop tracking for -Wreturn-type. We keep a stack of all
696 the current loops, represented by 'NULL_TREE' if we've seen a possible
697 exit, and 'error_mark_node' if not. This is currently used only to
698 suppress the warning about a function with no return statements, and
699 therefore we don't bother noting returns as possible exits. We also
700 don't bother with gotos. */
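/* For instance (a sketch of the cases involved):

     int f () { for (;;) { } }          // infinite: no -Wreturn-type warning
     int g (int i) { while (i) { } }    // has a possible exit, so the
                                        // missing return can still warn

   Returns and gotos inside the loop are deliberately not counted as
   possible exits.  */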
701
702 static void
begin_maybe_infinite_loop (tree cond)
704 {
705 /* Only track this while parsing a function, not during instantiation. */
706 if (!cfun || (DECL_TEMPLATE_INSTANTIATION (current_function_decl)
707 && !processing_template_decl))
708 return;
709 bool maybe_infinite = true;
710 if (cond)
711 {
712 cond = fold_non_dependent_expr (cond);
713 maybe_infinite = integer_nonzerop (cond);
714 }
715 vec_safe_push (cp_function_chain->infinite_loops,
716 maybe_infinite ? error_mark_node : NULL_TREE);
717
718 }
719
720 /* A break is a possible exit for the current loop. */
721
722 void
break_maybe_infinite_loop (void)
724 {
725 if (!cfun)
726 return;
727 cp_function_chain->infinite_loops->last() = NULL_TREE;
728 }
729
730 /* If we reach the end of the loop without seeing a possible exit, we have
731 an infinite loop. */
732
733 static void
end_maybe_infinite_loop (tree cond)
735 {
736 if (!cfun || (DECL_TEMPLATE_INSTANTIATION (current_function_decl)
737 && !processing_template_decl))
738 return;
739 tree current = cp_function_chain->infinite_loops->pop();
740 if (current != NULL_TREE)
741 {
742 cond = fold_non_dependent_expr (cond);
743 if (integer_nonzerop (cond))
744 current_function_infinite_loop = 1;
745 }
746 }
747
748
749 /* Begin a conditional that might contain a declaration. When generating
750 normal code, we want the declaration to appear before the statement
751 containing the conditional. When generating template code, we want the
752 conditional to be rendered as the raw DECL_EXPR. */
753
754 static void
begin_cond (tree *cond_p)
756 {
757 if (processing_template_decl)
758 *cond_p = push_stmt_list ();
759 }
760
761 /* Finish such a conditional. */
762
763 static void
finish_cond (tree *cond_p, tree expr)
765 {
766 if (processing_template_decl)
767 {
768 tree cond = pop_stmt_list (*cond_p);
769
770 if (expr == NULL_TREE)
771 /* Empty condition in 'for'. */
772 gcc_assert (empty_expr_stmt_p (cond));
773 else if (check_for_bare_parameter_packs (expr))
774 expr = error_mark_node;
775 else if (!empty_expr_stmt_p (cond))
776 expr = build2 (COMPOUND_EXPR, TREE_TYPE (expr), cond, expr);
777 }
778 *cond_p = expr;
779 }
780
781 /* If *COND_P specifies a conditional with a declaration, transform the
782 loop such that
783 while (A x = 42) { }
784 for (; A x = 42;) { }
785 becomes
786 while (true) { A x = 42; if (!x) break; }
787 for (;;) { A x = 42; if (!x) break; }
788 The statement list for BODY will be empty if the conditional did
789 not declare anything. */
790
791 static void
simplify_loop_decl_cond (tree *cond_p, tree body)
793 {
794 tree cond, if_stmt;
795
796 if (!TREE_SIDE_EFFECTS (body))
797 return;
798
799 cond = *cond_p;
800 *cond_p = boolean_true_node;
801
802 if_stmt = begin_if_stmt ();
803 cond_p = &cond;
804 while (TREE_CODE (*cond_p) == ANNOTATE_EXPR)
805 cond_p = &TREE_OPERAND (*cond_p, 0);
806 *cond_p = cp_build_unary_op (TRUTH_NOT_EXPR, *cond_p, false,
807 tf_warning_or_error);
808 finish_if_stmt_cond (cond, if_stmt);
809 finish_break_stmt ();
810 finish_then_clause (if_stmt);
811 finish_if_stmt (if_stmt);
812 }
813
814 /* Finish a goto-statement. */
815
816 tree
finish_goto_stmt (tree destination)
818 {
819 if (identifier_p (destination))
820 destination = lookup_label (destination);
821
822 /* We warn about unused labels with -Wunused. That means we have to
823 mark the used labels as used. */
824 if (TREE_CODE (destination) == LABEL_DECL)
825 TREE_USED (destination) = 1;
826 else
827 {
828 destination = mark_rvalue_use (destination);
829 if (!processing_template_decl)
830 {
831 destination = cp_convert (ptr_type_node, destination,
832 tf_warning_or_error);
833 if (error_operand_p (destination))
834 return NULL_TREE;
835 destination
836 = fold_build_cleanup_point_expr (TREE_TYPE (destination),
837 destination);
838 }
839 }
840
841 check_goto (destination);
842
843 add_stmt (build_predict_expr (PRED_GOTO, NOT_TAKEN));
844 return add_stmt (build_stmt (input_location, GOTO_EXPR, destination));
845 }
846
847 /* COND is the condition-expression for an if, while, etc.,
848 statement. Convert it to a boolean value, if appropriate.
849 In addition, verify sequence points if -Wsequence-point is enabled. */
850
851 static tree
maybe_convert_cond (tree cond)
853 {
854 /* Empty conditions remain empty. */
855 if (!cond)
856 return NULL_TREE;
857
858 /* Wait until we instantiate templates before doing conversion. */
859 if (type_dependent_expression_p (cond))
860 return cond;
861
862 if (warn_sequence_point && !processing_template_decl)
863 verify_sequence_points (cond);
864
865 /* Do the conversion. */
866 cond = convert_from_reference (cond);
867
868 if (TREE_CODE (cond) == MODIFY_EXPR
869 && warn_parentheses
870 && !warning_suppressed_p (cond, OPT_Wparentheses)
871 && warning_at (cp_expr_loc_or_input_loc (cond),
872 OPT_Wparentheses, "suggest parentheses around "
873 "assignment used as truth value"))
874 suppress_warning (cond, OPT_Wparentheses);
875
876 return condition_conversion (cond);
877 }
878
879 /* Finish an expression-statement, whose EXPRESSION is as indicated. */
880
881 tree
finish_expr_stmt (tree expr)
883 {
884 tree r = NULL_TREE;
885 location_t loc = EXPR_LOCATION (expr);
886
887 if (expr != NULL_TREE)
888 {
889 /* If we ran into a problem, make sure we complained. */
890 gcc_assert (expr != error_mark_node || seen_error ());
891
892 if (!processing_template_decl)
893 {
894 if (warn_sequence_point)
895 verify_sequence_points (expr);
896 expr = convert_to_void (expr, ICV_STATEMENT, tf_warning_or_error);
897 }
898 else if (!type_dependent_expression_p (expr))
899 convert_to_void (build_non_dependent_expr (expr), ICV_STATEMENT,
900 tf_warning_or_error);
901
902 if (check_for_bare_parameter_packs (expr))
903 expr = error_mark_node;
904
      /* Simplification of inner statement expressions, compound exprs,
	 etc. can result in us already having an EXPR_STMT.  */
907 if (TREE_CODE (expr) != CLEANUP_POINT_EXPR)
908 {
909 if (TREE_CODE (expr) != EXPR_STMT)
910 expr = build_stmt (loc, EXPR_STMT, expr);
911 expr = maybe_cleanup_point_expr_void (expr);
912 }
913
914 r = add_stmt (expr);
915 }
916
917 return r;
918 }
919
920
921 /* Begin an if-statement. Returns a newly created IF_STMT if
922 appropriate. */
923
924 tree
begin_if_stmt (void)
926 {
927 tree r, scope;
928 scope = do_pushlevel (sk_cond);
929 r = build_stmt (input_location, IF_STMT, NULL_TREE,
930 NULL_TREE, NULL_TREE, scope);
931 current_binding_level->this_entity = r;
932 begin_cond (&IF_COND (r));
933 return r;
934 }
935
936 /* Returns true if FN, a CALL_EXPR, is a call to
937 std::is_constant_evaluated or __builtin_is_constant_evaluated. */
938
939 static bool
is_std_constant_evaluated_p (tree fn)
941 {
942 /* std::is_constant_evaluated takes no arguments. */
943 if (call_expr_nargs (fn) != 0)
944 return false;
945
946 tree fndecl = cp_get_callee_fndecl_nofold (fn);
947 if (fndecl == NULL_TREE)
948 return false;
949
950 if (fndecl_built_in_p (fndecl, CP_BUILT_IN_IS_CONSTANT_EVALUATED,
951 BUILT_IN_FRONTEND))
952 return true;
953
954 if (!decl_in_std_namespace_p (fndecl))
955 return false;
956
957 tree name = DECL_NAME (fndecl);
958 return name && id_equal (name, "is_constant_evaluated");
959 }
960
961 /* Callback function for maybe_warn_for_constant_evaluated that looks
962 for calls to std::is_constant_evaluated in TP. */
963
964 static tree
find_std_constant_evaluated_r (tree *tp, int *walk_subtrees, void *)
966 {
967 tree t = *tp;
968
969 if (TYPE_P (t) || TREE_CONSTANT (t))
970 {
971 *walk_subtrees = false;
972 return NULL_TREE;
973 }
974
975 switch (TREE_CODE (t))
976 {
977 case CALL_EXPR:
978 if (is_std_constant_evaluated_p (t))
979 return t;
980 break;
981 case EXPR_STMT:
982 /* Don't warn in statement expressions. */
983 *walk_subtrees = false;
984 return NULL_TREE;
985 default:
986 break;
987 }
988
989 return NULL_TREE;
990 }
991
992 /* In certain contexts, std::is_constant_evaluated() is always true (for
993 instance, in a consteval function or in a constexpr if), or always false
994 (e.g., in a non-constexpr non-consteval function) so give the user a clue. */
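/* For example (illustrative sketches of the cases warned about):

     void f () { if (std::is_constant_evaluated ()) ... }   // always false here
     consteval int g () { if (std::is_constant_evaluated ()) ... }  // always true
     if constexpr (std::is_constant_evaluated ()) ...        // always true  */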
995
996 static void
maybe_warn_for_constant_evaluated (tree cond, bool constexpr_if)
998 {
999 if (!warn_tautological_compare)
1000 return;
1001
1002 /* Suppress warning for std::is_constant_evaluated if the conditional
1003 comes from a macro. */
1004 if (from_macro_expansion_at (EXPR_LOCATION (cond)))
1005 return;
1006
1007 cond = cp_walk_tree_without_duplicates (&cond, find_std_constant_evaluated_r,
1008 NULL);
1009 if (cond)
1010 {
1011 if (constexpr_if)
1012 warning_at (EXPR_LOCATION (cond), OPT_Wtautological_compare,
1013 "%<std::is_constant_evaluated%> always evaluates to "
1014 "true in %<if constexpr%>");
1015 else if (!maybe_constexpr_fn (current_function_decl))
1016 warning_at (EXPR_LOCATION (cond), OPT_Wtautological_compare,
1017 "%<std::is_constant_evaluated%> always evaluates to "
1018 "false in a non-%<constexpr%> function");
1019 else if (DECL_IMMEDIATE_FUNCTION_P (current_function_decl))
1020 warning_at (EXPR_LOCATION (cond), OPT_Wtautological_compare,
1021 "%<std::is_constant_evaluated%> always evaluates to "
1022 "true in a %<consteval%> function");
1023 }
1024 }
1025
1026 /* Process the COND of an if-statement, which may be given by
1027 IF_STMT. */
1028
1029 tree
finish_if_stmt_cond (tree cond, tree if_stmt)
1031 {
1032 cond = maybe_convert_cond (cond);
1033 maybe_warn_for_constant_evaluated (cond, IF_STMT_CONSTEXPR_P (if_stmt));
1034 if (IF_STMT_CONSTEXPR_P (if_stmt)
1035 && !type_dependent_expression_p (cond)
1036 && require_constant_expression (cond)
1037 && !instantiation_dependent_expression_p (cond)
1038 /* Wait until instantiation time, since only then COND has been
1039 converted to bool. */
1040 && TYPE_MAIN_VARIANT (TREE_TYPE (cond)) == boolean_type_node)
1041 {
1042 cond = instantiate_non_dependent_expr (cond);
1043 cond = cxx_constant_value (cond, NULL_TREE);
1044 }
1045 finish_cond (&IF_COND (if_stmt), cond);
1046 add_stmt (if_stmt);
1047 THEN_CLAUSE (if_stmt) = push_stmt_list ();
1048 return cond;
1049 }
1050
1051 /* Finish the then-clause of an if-statement, which may be given by
1052 IF_STMT. */
1053
1054 tree
finish_then_clause (tree if_stmt)
1056 {
1057 THEN_CLAUSE (if_stmt) = pop_stmt_list (THEN_CLAUSE (if_stmt));
1058 return if_stmt;
1059 }
1060
1061 /* Begin the else-clause of an if-statement. */
1062
1063 void
begin_else_clause (tree if_stmt)
1065 {
1066 ELSE_CLAUSE (if_stmt) = push_stmt_list ();
1067 }
1068
1069 /* Finish the else-clause of an if-statement, which may be given by
1070 IF_STMT. */
1071
1072 void
finish_else_clause (tree if_stmt)
1074 {
1075 ELSE_CLAUSE (if_stmt) = pop_stmt_list (ELSE_CLAUSE (if_stmt));
1076 }
1077
1078 /* Callback for cp_walk_tree to mark all {VAR,PARM}_DECLs in a tree as
1079 read. */
1080
1081 static tree
maybe_mark_exp_read_r (tree *tp, int *, void *)
1083 {
1084 tree t = *tp;
1085 if (VAR_P (t) || TREE_CODE (t) == PARM_DECL)
1086 mark_exp_read (t);
1087 return NULL_TREE;
1088 }
1089
1090 /* Finish an if-statement. */
1091
1092 void
finish_if_stmt (tree if_stmt)
1094 {
1095 tree scope = IF_SCOPE (if_stmt);
1096 IF_SCOPE (if_stmt) = NULL;
1097 if (IF_STMT_CONSTEXPR_P (if_stmt))
1098 {
1099 /* Prevent various -Wunused warnings. We might not instantiate
1100 either of these branches, so we would not mark the variables
1101 used in that branch as read. */
1102 cp_walk_tree_without_duplicates (&THEN_CLAUSE (if_stmt),
1103 maybe_mark_exp_read_r, NULL);
1104 cp_walk_tree_without_duplicates (&ELSE_CLAUSE (if_stmt),
1105 maybe_mark_exp_read_r, NULL);
1106 }
1107 add_stmt (do_poplevel (scope));
1108 }
1109
1110 /* Begin a while-statement. Returns a newly created WHILE_STMT if
1111 appropriate. */
1112
1113 tree
begin_while_stmt (void)
1115 {
1116 tree r;
1117 r = build_stmt (input_location, WHILE_STMT, NULL_TREE, NULL_TREE);
1118 add_stmt (r);
1119 WHILE_BODY (r) = do_pushlevel (sk_block);
1120 begin_cond (&WHILE_COND (r));
1121 return r;
1122 }
1123
1124 /* Process the COND of a while-statement, which may be given by
1125 WHILE_STMT. */
1126
1127 void
finish_while_stmt_cond (tree cond, tree while_stmt, bool ivdep,
			unsigned short unroll)
1130 {
1131 cond = maybe_convert_cond (cond);
1132 finish_cond (&WHILE_COND (while_stmt), cond);
1133 begin_maybe_infinite_loop (cond);
1134 if (ivdep && cond != error_mark_node)
1135 WHILE_COND (while_stmt) = build3 (ANNOTATE_EXPR,
1136 TREE_TYPE (WHILE_COND (while_stmt)),
1137 WHILE_COND (while_stmt),
1138 build_int_cst (integer_type_node,
1139 annot_expr_ivdep_kind),
1140 integer_zero_node);
1141 if (unroll && cond != error_mark_node)
1142 WHILE_COND (while_stmt) = build3 (ANNOTATE_EXPR,
1143 TREE_TYPE (WHILE_COND (while_stmt)),
1144 WHILE_COND (while_stmt),
1145 build_int_cst (integer_type_node,
1146 annot_expr_unroll_kind),
1147 build_int_cst (integer_type_node,
1148 unroll));
1149 simplify_loop_decl_cond (&WHILE_COND (while_stmt), WHILE_BODY (while_stmt));
1150 }
1151
1152 /* Finish a while-statement, which may be given by WHILE_STMT. */
1153
1154 void
finish_while_stmt (tree while_stmt)
1156 {
1157 end_maybe_infinite_loop (boolean_true_node);
1158 WHILE_BODY (while_stmt) = do_poplevel (WHILE_BODY (while_stmt));
1159 }
1160
1161 /* Begin a do-statement. Returns a newly created DO_STMT if
1162 appropriate. */
1163
1164 tree
begin_do_stmt (void)
1166 {
1167 tree r = build_stmt (input_location, DO_STMT, NULL_TREE, NULL_TREE);
1168 begin_maybe_infinite_loop (boolean_true_node);
1169 add_stmt (r);
1170 DO_BODY (r) = push_stmt_list ();
1171 return r;
1172 }
1173
1174 /* Finish the body of a do-statement, which may be given by DO_STMT. */
1175
1176 void
finish_do_body (tree do_stmt)
1178 {
1179 tree body = DO_BODY (do_stmt) = pop_stmt_list (DO_BODY (do_stmt));
1180
1181 if (TREE_CODE (body) == STATEMENT_LIST && STATEMENT_LIST_TAIL (body))
1182 body = STATEMENT_LIST_TAIL (body)->stmt;
1183
1184 if (IS_EMPTY_STMT (body))
1185 warning (OPT_Wempty_body,
1186 "suggest explicit braces around empty body in %<do%> statement");
1187 }
1188
1189 /* Finish a do-statement, which may be given by DO_STMT, and whose
1190 COND is as indicated. */
1191
1192 void
finish_do_stmt (tree cond, tree do_stmt, bool ivdep, unsigned short unroll)
1194 {
1195 cond = maybe_convert_cond (cond);
1196 end_maybe_infinite_loop (cond);
1197 /* Unlike other iteration statements, the condition may not contain
1198 a declaration, so we don't call finish_cond which checks for
1199 unexpanded parameter packs. */
1200 if (check_for_bare_parameter_packs (cond))
1201 cond = error_mark_node;
1202 if (ivdep && cond != error_mark_node)
1203 cond = build3 (ANNOTATE_EXPR, TREE_TYPE (cond), cond,
1204 build_int_cst (integer_type_node, annot_expr_ivdep_kind),
1205 integer_zero_node);
1206 if (unroll && cond != error_mark_node)
1207 cond = build3 (ANNOTATE_EXPR, TREE_TYPE (cond), cond,
1208 build_int_cst (integer_type_node, annot_expr_unroll_kind),
1209 build_int_cst (integer_type_node, unroll));
1210 DO_COND (do_stmt) = cond;
1211 }
1212
1213 /* Finish a return-statement. The EXPRESSION returned, if any, is as
1214 indicated. */
1215
1216 tree
finish_return_stmt (tree expr)
1218 {
1219 tree r;
1220 bool no_warning;
1221
1222 expr = check_return_expr (expr, &no_warning);
1223
1224 if (error_operand_p (expr)
1225 || (flag_openmp && !check_omp_return ()))
1226 {
1227 /* Suppress -Wreturn-type for this function. */
1228 if (warn_return_type)
1229 suppress_warning (current_function_decl, OPT_Wreturn_type);
1230 return error_mark_node;
1231 }
1232
1233 if (!processing_template_decl)
1234 {
1235 if (warn_sequence_point)
1236 verify_sequence_points (expr);
1237
1238 if (DECL_DESTRUCTOR_P (current_function_decl)
1239 || (DECL_CONSTRUCTOR_P (current_function_decl)
1240 && targetm.cxx.cdtor_returns_this ()))
1241 {
1242 /* Similarly, all destructors must run destructors for
1243 base-classes before returning. So, all returns in a
1244 destructor get sent to the DTOR_LABEL; finish_function emits
1245 code to return a value there. */
1246 return finish_goto_stmt (cdtor_label);
1247 }
1248 }
1249
1250 r = build_stmt (input_location, RETURN_EXPR, expr);
1251 if (no_warning)
1252 suppress_warning (r, OPT_Wreturn_type);
1253 r = maybe_cleanup_point_expr_void (r);
1254 r = add_stmt (r);
1255
1256 return r;
1257 }
1258
/* Begin the scope of a for-statement or a range-for-statement.
   The returned scope and the statement list stored in *INIT are
   to be passed to begin_for_stmt or begin_range_for_stmt.  */
1262
1263 tree
begin_for_scope (tree *init)
1265 {
1266 tree scope = do_pushlevel (sk_for);
1267
1268 if (processing_template_decl)
1269 *init = push_stmt_list ();
1270 else
1271 *init = NULL_TREE;
1272
1273 return scope;
1274 }
1275
/* Begin a for-statement.  Returns a new FOR_STMT.
   SCOPE and INIT should be the values returned by begin_for_scope,
   or both NULL_TREE.  */
1279
1280 tree
begin_for_stmt (tree scope, tree init)
1282 {
1283 tree r;
1284
1285 r = build_stmt (input_location, FOR_STMT, NULL_TREE, NULL_TREE,
1286 NULL_TREE, NULL_TREE, NULL_TREE);
1287
1288 if (scope == NULL_TREE)
1289 {
1290 gcc_assert (!init);
1291 scope = begin_for_scope (&init);
1292 }
1293
1294 FOR_INIT_STMT (r) = init;
1295 FOR_SCOPE (r) = scope;
1296
1297 return r;
1298 }
1299
1300 /* Finish the init-statement of a for-statement, which may be
1301 given by FOR_STMT. */
1302
1303 void
finish_init_stmt (tree for_stmt)
1305 {
1306 if (processing_template_decl)
1307 FOR_INIT_STMT (for_stmt) = pop_stmt_list (FOR_INIT_STMT (for_stmt));
1308 add_stmt (for_stmt);
1309 FOR_BODY (for_stmt) = do_pushlevel (sk_block);
1310 begin_cond (&FOR_COND (for_stmt));
1311 }
1312
1313 /* Finish the COND of a for-statement, which may be given by
1314 FOR_STMT. */
1315
1316 void
finish_for_cond (tree cond, tree for_stmt, bool ivdep, unsigned short unroll)
1318 {
1319 cond = maybe_convert_cond (cond);
1320 finish_cond (&FOR_COND (for_stmt), cond);
1321 begin_maybe_infinite_loop (cond);
1322 if (ivdep && cond != error_mark_node)
1323 FOR_COND (for_stmt) = build3 (ANNOTATE_EXPR,
1324 TREE_TYPE (FOR_COND (for_stmt)),
1325 FOR_COND (for_stmt),
1326 build_int_cst (integer_type_node,
1327 annot_expr_ivdep_kind),
1328 integer_zero_node);
1329 if (unroll && cond != error_mark_node)
1330 FOR_COND (for_stmt) = build3 (ANNOTATE_EXPR,
1331 TREE_TYPE (FOR_COND (for_stmt)),
1332 FOR_COND (for_stmt),
1333 build_int_cst (integer_type_node,
1334 annot_expr_unroll_kind),
1335 build_int_cst (integer_type_node,
1336 unroll));
1337 simplify_loop_decl_cond (&FOR_COND (for_stmt), FOR_BODY (for_stmt));
1338 }
1339
1340 /* Finish the increment-EXPRESSION in a for-statement, which may be
1341 given by FOR_STMT. */
1342
1343 void
finish_for_expr (tree expr, tree for_stmt)
1345 {
1346 if (!expr)
1347 return;
1348 /* If EXPR is an overloaded function, issue an error; there is no
1349 context available to use to perform overload resolution. */
1350 if (type_unknown_p (expr))
1351 {
1352 cxx_incomplete_type_error (expr, TREE_TYPE (expr));
1353 expr = error_mark_node;
1354 }
1355 if (!processing_template_decl)
1356 {
1357 if (warn_sequence_point)
1358 verify_sequence_points (expr);
1359 expr = convert_to_void (expr, ICV_THIRD_IN_FOR,
1360 tf_warning_or_error);
1361 }
1362 else if (!type_dependent_expression_p (expr))
1363 convert_to_void (build_non_dependent_expr (expr), ICV_THIRD_IN_FOR,
1364 tf_warning_or_error);
1365 expr = maybe_cleanup_point_expr_void (expr);
1366 if (check_for_bare_parameter_packs (expr))
1367 expr = error_mark_node;
1368 FOR_EXPR (for_stmt) = expr;
1369 }
1370
1371 /* Finish the body of a for-statement, which may be given by
1372 FOR_STMT. The increment-EXPR for the loop must be
1373 provided.
1374 It can also finish RANGE_FOR_STMT. */
1375
1376 void
finish_for_stmt (tree for_stmt)
1378 {
1379 end_maybe_infinite_loop (boolean_true_node);
1380
1381 if (TREE_CODE (for_stmt) == RANGE_FOR_STMT)
1382 RANGE_FOR_BODY (for_stmt) = do_poplevel (RANGE_FOR_BODY (for_stmt));
1383 else
1384 FOR_BODY (for_stmt) = do_poplevel (FOR_BODY (for_stmt));
1385
1386 /* Pop the scope for the body of the loop. */
1387 tree *scope_ptr = (TREE_CODE (for_stmt) == RANGE_FOR_STMT
1388 ? &RANGE_FOR_SCOPE (for_stmt)
1389 : &FOR_SCOPE (for_stmt));
1390 tree scope = *scope_ptr;
1391 *scope_ptr = NULL;
1392
  /* During parsing of the body, a range-for uses "__for_{range,begin,end} "
     decl names to make those inaccessible to code in the body.
     Change them to names with an underscore instead of a space, so that
     they can be inspected in the debugger.  */
1397 tree range_for_decl[3] = { NULL_TREE, NULL_TREE, NULL_TREE };
1398 gcc_assert (CPTI_FOR_BEGIN__IDENTIFIER == CPTI_FOR_RANGE__IDENTIFIER + 1
1399 && CPTI_FOR_END__IDENTIFIER == CPTI_FOR_RANGE__IDENTIFIER + 2
1400 && CPTI_FOR_RANGE_IDENTIFIER == CPTI_FOR_RANGE__IDENTIFIER + 3
1401 && CPTI_FOR_BEGIN_IDENTIFIER == CPTI_FOR_BEGIN__IDENTIFIER + 3
1402 && CPTI_FOR_END_IDENTIFIER == CPTI_FOR_END__IDENTIFIER + 3);
1403 for (int i = 0; i < 3; i++)
1404 {
1405 tree id = cp_global_trees[CPTI_FOR_RANGE__IDENTIFIER + i];
1406 if (IDENTIFIER_BINDING (id)
1407 && IDENTIFIER_BINDING (id)->scope == current_binding_level)
1408 {
1409 range_for_decl[i] = IDENTIFIER_BINDING (id)->value;
1410 gcc_assert (VAR_P (range_for_decl[i])
1411 && DECL_ARTIFICIAL (range_for_decl[i]));
1412 }
1413 }
1414
1415 add_stmt (do_poplevel (scope));
1416
1417 /* If we're being called from build_vec_init, don't mess with the names of
1418 the variables for an enclosing range-for. */
1419 if (!stmts_are_full_exprs_p ())
1420 return;
1421
1422 for (int i = 0; i < 3; i++)
1423 if (range_for_decl[i])
1424 DECL_NAME (range_for_decl[i])
1425 = cp_global_trees[CPTI_FOR_RANGE_IDENTIFIER + i];
1426 }
1427
/* Begin a range-for-statement.  Returns a new RANGE_FOR_STMT.
   SCOPE and INIT should be the values returned by begin_for_scope,
   or both NULL_TREE.
   To finish it, call finish_for_stmt ().  */
1432
1433 tree
begin_range_for_stmt (tree scope, tree init)
1435 {
1436 begin_maybe_infinite_loop (boolean_false_node);
1437
1438 tree r = build_stmt (input_location, RANGE_FOR_STMT, NULL_TREE, NULL_TREE,
1439 NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE);
1440
1441 if (scope == NULL_TREE)
1442 {
1443 gcc_assert (!init);
1444 scope = begin_for_scope (&init);
1445 }
1446
1447 /* Since C++20, RANGE_FOR_STMTs can use the init tree, so save it. */
1448 RANGE_FOR_INIT_STMT (r) = init;
1449 RANGE_FOR_SCOPE (r) = scope;
1450
1451 return r;
1452 }
1453
1454 /* Finish the head of a range-based for statement, which may
1455 be given by RANGE_FOR_STMT. DECL must be the declaration
1456 and EXPR must be the loop expression. */
1457
1458 void
finish_range_for_decl (tree range_for_stmt, tree decl, tree expr)
1460 {
1461 if (processing_template_decl)
1462 RANGE_FOR_INIT_STMT (range_for_stmt)
1463 = pop_stmt_list (RANGE_FOR_INIT_STMT (range_for_stmt));
1464 RANGE_FOR_DECL (range_for_stmt) = decl;
1465 RANGE_FOR_EXPR (range_for_stmt) = expr;
1466 add_stmt (range_for_stmt);
1467 RANGE_FOR_BODY (range_for_stmt) = do_pushlevel (sk_block);
1468 }
1469
1470 /* Finish a break-statement. */
1471
1472 tree
finish_break_stmt (void)
1474 {
1475 /* In switch statements break is sometimes stylistically used after
1476 a return statement. This can lead to spurious warnings about
1477 control reaching the end of a non-void function when it is
1478 inlined. Note that we are calling block_may_fallthru with
1479 language specific tree nodes; this works because
1480 block_may_fallthru returns true when given something it does not
1481 understand. */
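  /* For example (sketch):

       switch (c)
	 {
	 case 0:
	   return f ();
	   break;   // stylistic only; emitting it could provoke that warning
	 }  */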
1482 if (!block_may_fallthru (cur_stmt_list))
1483 return void_node;
1484 note_break_stmt ();
1485 return add_stmt (build_stmt (input_location, BREAK_STMT));
1486 }
1487
1488 /* Finish a continue-statement. */
1489
1490 tree
finish_continue_stmt (void)
1492 {
1493 return add_stmt (build_stmt (input_location, CONTINUE_STMT));
1494 }
1495
1496 /* Begin a switch-statement. Returns a new SWITCH_STMT if
1497 appropriate. */
1498
1499 tree
begin_switch_stmt (void)
1501 {
1502 tree r, scope;
1503
1504 scope = do_pushlevel (sk_cond);
1505 r = build_stmt (input_location, SWITCH_STMT, NULL_TREE, NULL_TREE, NULL_TREE, scope);
1506
1507 begin_cond (&SWITCH_STMT_COND (r));
1508
1509 return r;
1510 }
1511
1512 /* Finish the cond of a switch-statement. */
1513
1514 void
finish_switch_cond (tree cond, tree switch_stmt)
1516 {
1517 tree orig_type = NULL;
1518
1519 if (!processing_template_decl)
1520 {
1521 /* Convert the condition to an integer or enumeration type. */
1522 tree orig_cond = cond;
1523 cond = build_expr_type_conversion (WANT_INT | WANT_ENUM, cond, true);
1524 if (cond == NULL_TREE)
1525 {
1526 error_at (cp_expr_loc_or_input_loc (orig_cond),
1527 "switch quantity not an integer");
1528 cond = error_mark_node;
1529 }
1530 /* We want unlowered type here to handle enum bit-fields. */
1531 orig_type = unlowered_expr_type (cond);
1532 if (TREE_CODE (orig_type) != ENUMERAL_TYPE)
1533 orig_type = TREE_TYPE (cond);
1534 if (cond != error_mark_node)
1535 {
1536 /* [stmt.switch]
1537
1538 Integral promotions are performed. */
1539 cond = perform_integral_promotions (cond);
1540 cond = maybe_cleanup_point_expr (cond);
1541 }
1542 }
1543 if (check_for_bare_parameter_packs (cond))
1544 cond = error_mark_node;
1545 else if (!processing_template_decl && warn_sequence_point)
1546 verify_sequence_points (cond);
1547
1548 finish_cond (&SWITCH_STMT_COND (switch_stmt), cond);
1549 SWITCH_STMT_TYPE (switch_stmt) = orig_type;
1550 add_stmt (switch_stmt);
1551 push_switch (switch_stmt);
1552 SWITCH_STMT_BODY (switch_stmt) = push_stmt_list ();
1553 }
1554
1555 /* Finish the body of a switch-statement, which may be given by
1556 SWITCH_STMT. The COND to switch on is indicated. */
1557
1558 void
finish_switch_stmt (tree switch_stmt)
1560 {
1561 tree scope;
1562
1563 SWITCH_STMT_BODY (switch_stmt) =
1564 pop_stmt_list (SWITCH_STMT_BODY (switch_stmt));
1565 pop_switch ();
1566
1567 scope = SWITCH_STMT_SCOPE (switch_stmt);
1568 SWITCH_STMT_SCOPE (switch_stmt) = NULL;
1569 add_stmt (do_poplevel (scope));
1570 }
1571
1572 /* Begin a try-block. Returns a newly-created TRY_BLOCK if
1573 appropriate. */
1574
1575 tree
begin_try_block (void)
1577 {
1578 tree r = build_stmt (input_location, TRY_BLOCK, NULL_TREE, NULL_TREE);
1579 add_stmt (r);
1580 TRY_STMTS (r) = push_stmt_list ();
1581 return r;
1582 }
1583
1584 /* Likewise, for a function-try-block. The block returned in
1585 *COMPOUND_STMT is an artificial outer scope, containing the
1586 function-try-block. */
1587
1588 tree
begin_function_try_block (tree *compound_stmt)
1590 {
1591 tree r;
1592 /* This outer scope does not exist in the C++ standard, but we need
1593 a place to put __FUNCTION__ and similar variables. */
1594 *compound_stmt = begin_compound_stmt (0);
1595 r = begin_try_block ();
1596 FN_TRY_BLOCK_P (r) = 1;
1597 return r;
1598 }
1599
1600 /* Finish a try-block, which may be given by TRY_BLOCK. */
1601
1602 void
finish_try_block (tree try_block)
1604 {
1605 TRY_STMTS (try_block) = pop_stmt_list (TRY_STMTS (try_block));
1606 TRY_HANDLERS (try_block) = push_stmt_list ();
1607 }
1608
1609 /* Finish the body of a cleanup try-block, which may be given by
1610 TRY_BLOCK. */
1611
1612 void
finish_cleanup_try_block (tree try_block)
1614 {
1615 TRY_STMTS (try_block) = pop_stmt_list (TRY_STMTS (try_block));
1616 }
1617
/* Finish an implicitly generated try-block, whose cleanup is given
   by CLEANUP.  */
1620
1621 void
finish_cleanup (tree cleanup, tree try_block)
1623 {
1624 TRY_HANDLERS (try_block) = cleanup;
1625 CLEANUP_P (try_block) = 1;
1626 }
1627
1628 /* Likewise, for a function-try-block. */
1629
1630 void
finish_function_try_block (tree try_block)
1632 {
1633 finish_try_block (try_block);
1634 /* FIXME : something queer about CTOR_INITIALIZER somehow following
1635 the try block, but moving it inside. */
1636 in_function_try_handler = 1;
1637 }
1638
1639 /* Finish a handler-sequence for a try-block, which may be given by
1640 TRY_BLOCK. */
1641
1642 void
finish_handler_sequence (tree try_block)
1644 {
1645 TRY_HANDLERS (try_block) = pop_stmt_list (TRY_HANDLERS (try_block));
1646 check_handlers (TRY_HANDLERS (try_block));
1647 }
1648
1649 /* Finish the handler-seq for a function-try-block, given by
1650 TRY_BLOCK. COMPOUND_STMT is the outer block created by
1651 begin_function_try_block. */
1652
1653 void
finish_function_handler_sequence (tree try_block, tree compound_stmt)
1655 {
1656 in_function_try_handler = 0;
1657 finish_handler_sequence (try_block);
1658 finish_compound_stmt (compound_stmt);
1659 }
1660
1661 /* Begin a handler. Returns a HANDLER if appropriate. */
1662
1663 tree
begin_handler (void)
1665 {
1666 tree r;
1667
1668 r = build_stmt (input_location, HANDLER, NULL_TREE, NULL_TREE);
1669 add_stmt (r);
1670
1671 /* Create a binding level for the eh_info and the exception object
1672 cleanup. */
1673 HANDLER_BODY (r) = do_pushlevel (sk_catch);
1674
1675 return r;
1676 }
1677
1678 /* Finish the handler-parameters for a handler, which may be given by
1679 HANDLER. DECL is the declaration for the catch parameter, or NULL
1680 if this is a `catch (...)' clause. */
1681
1682 void
finish_handler_parms (tree decl, tree handler)
1684 {
1685 tree type = NULL_TREE;
1686 if (processing_template_decl)
1687 {
1688 if (decl)
1689 {
1690 decl = pushdecl (decl);
1691 decl = push_template_decl (decl);
1692 HANDLER_PARMS (handler) = decl;
1693 type = TREE_TYPE (decl);
1694 }
1695 }
1696 else
1697 {
1698 type = expand_start_catch_block (decl);
1699 if (warn_catch_value
1700 && type != NULL_TREE
1701 && type != error_mark_node
1702 && !TYPE_REF_P (TREE_TYPE (decl)))
1703 {
1704 tree orig_type = TREE_TYPE (decl);
1705 if (CLASS_TYPE_P (orig_type))
1706 {
1707 if (TYPE_POLYMORPHIC_P (orig_type))
1708 warning_at (DECL_SOURCE_LOCATION (decl),
1709 OPT_Wcatch_value_,
1710 "catching polymorphic type %q#T by value",
1711 orig_type);
1712 else if (warn_catch_value > 1)
1713 warning_at (DECL_SOURCE_LOCATION (decl),
1714 OPT_Wcatch_value_,
1715 "catching type %q#T by value", orig_type);
1716 }
1717 else if (warn_catch_value > 2)
1718 warning_at (DECL_SOURCE_LOCATION (decl),
1719 OPT_Wcatch_value_,
1720 "catching non-reference type %q#T", orig_type);
1721 }
1722 }
1723 HANDLER_TYPE (handler) = type;
1724 }
1725
1726 /* Finish a handler, which may be given by HANDLER. The BLOCKs are
1727 the return value from the matching call to finish_handler_parms. */
1728
1729 void
1730 finish_handler (tree handler)
1731 {
1732 if (!processing_template_decl)
1733 expand_end_catch_block ();
1734 HANDLER_BODY (handler) = do_poplevel (HANDLER_BODY (handler));
1735 }
1736
1737 /* Begin a compound statement. FLAGS contains some bits that control the
1738 behavior and context. If BCS_NO_SCOPE is set, the compound statement
1739 does not define a scope. If BCS_FN_BODY is set, this is the outermost
1740 block of a function. If BCS_TRY_BLOCK is set, this is the block
1741 created on behalf of a TRY statement. Returns a token to be passed to
1742 finish_compound_stmt. */
1743
1744 tree
1745 begin_compound_stmt (unsigned int flags)
1746 {
1747 tree r;
1748
1749 if (flags & BCS_NO_SCOPE)
1750 {
1751 r = push_stmt_list ();
1752 STATEMENT_LIST_NO_SCOPE (r) = 1;
1753
1754 /* Normally, we try hard to keep the BLOCK for a statement-expression.
1755 But, if it's a statement-expression with a scopeless block, there's
1756 nothing to keep, and we don't want to accidentally keep a block
1757 *inside* the scopeless block. */
1758 keep_next_level (false);
1759 }
1760 else
1761 {
1762 scope_kind sk = sk_block;
1763 if (flags & BCS_TRY_BLOCK)
1764 sk = sk_try;
1765 else if (flags & BCS_TRANSACTION)
1766 sk = sk_transaction;
1767 r = do_pushlevel (sk);
1768 }
1769
1770 /* When processing a template, we need to remember where the braces were,
1771 so that we can set up identical scopes when instantiating the template
1772 later. BIND_EXPR is a handy candidate for this.
1773 Note that do_poplevel won't create a BIND_EXPR itself here (and thus
1774 result in nested BIND_EXPRs), since we don't build BLOCK nodes when
1775 processing templates. */
1776 if (processing_template_decl)
1777 {
1778 r = build3 (BIND_EXPR, NULL, NULL, r, NULL);
1779 BIND_EXPR_TRY_BLOCK (r) = (flags & BCS_TRY_BLOCK) != 0;
1780 BIND_EXPR_BODY_BLOCK (r) = (flags & BCS_FN_BODY) != 0;
1781 TREE_SIDE_EFFECTS (r) = 1;
1782 }
1783
1784 return r;
1785 }
1786
1787 /* Finish a compound-statement, which is given by STMT. */
1788
1789 void
1790 finish_compound_stmt (tree stmt)
1791 {
1792 if (TREE_CODE (stmt) == BIND_EXPR)
1793 {
1794 tree body = do_poplevel (BIND_EXPR_BODY (stmt));
1795 /* If the STATEMENT_LIST is empty and this BIND_EXPR isn't special,
1796 discard the BIND_EXPR so it can be merged with the containing
1797 STATEMENT_LIST. */
1798 if (TREE_CODE (body) == STATEMENT_LIST
1799 && STATEMENT_LIST_HEAD (body) == NULL
1800 && !BIND_EXPR_BODY_BLOCK (stmt)
1801 && !BIND_EXPR_TRY_BLOCK (stmt))
1802 stmt = body;
1803 else
1804 BIND_EXPR_BODY (stmt) = body;
1805 }
1806 else if (STATEMENT_LIST_NO_SCOPE (stmt))
1807 stmt = pop_stmt_list (stmt);
1808 else
1809 {
1810 /* Destroy any ObjC "super" receivers that may have been
1811 created. */
1812 objc_clear_super_receiver ();
1813
1814 stmt = do_poplevel (stmt);
1815 }
1816
1817 /* ??? See c_end_compound_stmt wrt statement expressions. */
1818 add_stmt (stmt);
1819 }
1820
1821 /* Finish an asm-statement, whose components are a STRING, some
1822 OUTPUT_OPERANDS, some INPUT_OPERANDS, some CLOBBERS and some
1823 LABELS. Also note whether the asm-statement should be
1824 considered volatile, and whether it is asm inline. */
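   For illustration only (a sketch assuming x86 AT&T extended asm; the
   operands map onto the parameters above):

     int src = 1, dst;
     asm volatile ("mov %1, %0\n\t"
                   "add $1, %0"
                   : "=r" (dst)      // one output operand
                   : "r" (src)       // one input operand
                   : "cc");          // one clobber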
1825
1826 tree
1827 finish_asm_stmt (location_t loc, int volatile_p, tree string,
1828 tree output_operands, tree input_operands, tree clobbers,
1829 tree labels, bool inline_p)
1830 {
1831 tree r;
1832 tree t;
1833 int ninputs = list_length (input_operands);
1834 int noutputs = list_length (output_operands);
1835
1836 if (!processing_template_decl)
1837 {
1838 const char *constraint;
1839 const char **oconstraints;
1840 bool allows_mem, allows_reg, is_inout;
1841 tree operand;
1842 int i;
1843
1844 oconstraints = XALLOCAVEC (const char *, noutputs);
1845
1846 string = resolve_asm_operand_names (string, output_operands,
1847 input_operands, labels);
1848
1849 for (i = 0, t = output_operands; t; t = TREE_CHAIN (t), ++i)
1850 {
1851 operand = TREE_VALUE (t);
1852
1853 /* ??? Really, this should not be here. Users should be using a
1854 proper lvalue, dammit. But there's a long history of using
1855 casts in the output operands. In cases like longlong.h, this
1856 becomes a primitive form of typechecking -- if the cast can be
1857 removed, then the output operand had a type of the proper width;
1858 otherwise we'll get an error. Gross, but ... */
1859 STRIP_NOPS (operand);
1860
1861 operand = mark_lvalue_use (operand);
1862
1863 if (!lvalue_or_else (operand, lv_asm, tf_warning_or_error))
1864 operand = error_mark_node;
1865
1866 if (operand != error_mark_node
1867 && (TREE_READONLY (operand)
1868 || CP_TYPE_CONST_P (TREE_TYPE (operand))
1869 /* Functions are not modifiable, even though they are
1870 lvalues. */
1871 || FUNC_OR_METHOD_TYPE_P (TREE_TYPE (operand))
1872 /* If it's an aggregate and any field is const, then it is
1873 effectively const. */
1874 || (CLASS_TYPE_P (TREE_TYPE (operand))
1875 && C_TYPE_FIELDS_READONLY (TREE_TYPE (operand)))))
1876 cxx_readonly_error (loc, operand, lv_asm);
1877
1878 tree *op = &operand;
1879 while (TREE_CODE (*op) == COMPOUND_EXPR)
1880 op = &TREE_OPERAND (*op, 1);
1881 switch (TREE_CODE (*op))
1882 {
1883 case PREINCREMENT_EXPR:
1884 case PREDECREMENT_EXPR:
1885 case MODIFY_EXPR:
1886 *op = genericize_compound_lvalue (*op);
1887 op = &TREE_OPERAND (*op, 1);
1888 break;
1889 default:
1890 break;
1891 }
1892
1893 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1894 oconstraints[i] = constraint;
1895
1896 if (parse_output_constraint (&constraint, i, ninputs, noutputs,
1897 &allows_mem, &allows_reg, &is_inout))
1898 {
1899 /* If the operand is going to end up in memory,
1900 mark it addressable. */
1901 if (!allows_reg && !cxx_mark_addressable (*op))
1902 operand = error_mark_node;
1903 }
1904 else
1905 operand = error_mark_node;
1906
1907 TREE_VALUE (t) = operand;
1908 }
1909
1910 for (i = 0, t = input_operands; t; ++i, t = TREE_CHAIN (t))
1911 {
1912 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
1913 bool constraint_parsed
1914 = parse_input_constraint (&constraint, i, ninputs, noutputs, 0,
1915 oconstraints, &allows_mem, &allows_reg);
1916 /* If the operand is going to end up in memory, don't call
1917 decay_conversion. */
1918 if (constraint_parsed && !allows_reg && allows_mem)
1919 operand = mark_lvalue_use (TREE_VALUE (t));
1920 else
1921 operand = decay_conversion (TREE_VALUE (t), tf_warning_or_error);
1922
1923 /* If the type of the operand hasn't been determined (e.g.,
1924 because it involves an overloaded function), then issue
1925 an error message. There's no context available to
1926 resolve the overloading. */
1927 if (TREE_TYPE (operand) == unknown_type_node)
1928 {
1929 error_at (loc,
1930 "type of %<asm%> operand %qE could not be determined",
1931 TREE_VALUE (t));
1932 operand = error_mark_node;
1933 }
1934
1935 if (constraint_parsed)
1936 {
1937 /* If the operand is going to end up in memory,
1938 mark it addressable. */
1939 if (!allows_reg && allows_mem)
1940 {
1941 /* Strip the nops as we allow this case. FIXME, this really
1942 should be rejected or made deprecated. */
1943 STRIP_NOPS (operand);
1944
1945 tree *op = &operand;
1946 while (TREE_CODE (*op) == COMPOUND_EXPR)
1947 op = &TREE_OPERAND (*op, 1);
1948 switch (TREE_CODE (*op))
1949 {
1950 case PREINCREMENT_EXPR:
1951 case PREDECREMENT_EXPR:
1952 case MODIFY_EXPR:
1953 *op = genericize_compound_lvalue (*op);
1954 op = &TREE_OPERAND (*op, 1);
1955 break;
1956 default:
1957 break;
1958 }
1959
1960 if (!cxx_mark_addressable (*op))
1961 operand = error_mark_node;
1962 }
1963 else if (!allows_reg && !allows_mem)
1964 {
1965 /* If constraint allows neither register nor memory,
1966 try harder to get a constant. */
1967 tree constop = maybe_constant_value (operand);
1968 if (TREE_CONSTANT (constop))
1969 operand = constop;
1970 }
1971 }
1972 else
1973 operand = error_mark_node;
1974
1975 TREE_VALUE (t) = operand;
1976 }
1977 }
1978
1979 r = build_stmt (loc, ASM_EXPR, string,
1980 output_operands, input_operands,
1981 clobbers, labels);
1982 ASM_VOLATILE_P (r) = volatile_p || noutputs == 0;
1983 ASM_INLINE_P (r) = inline_p;
1984 r = maybe_cleanup_point_expr_void (r);
1985 return add_stmt (r);
1986 }
1987
1988 /* Finish a label with the indicated NAME. Returns the new label. */
1989
1990 tree
1991 finish_label_stmt (tree name)
1992 {
1993 tree decl = define_label (input_location, name);
1994
1995 if (decl == error_mark_node)
1996 return error_mark_node;
1997
1998 add_stmt (build_stmt (input_location, LABEL_EXPR, decl));
1999
2000 return decl;
2001 }
2002
2003 /* Finish a series of declarations for local labels. G++ allows users
2004 to declare "local" labels, i.e., labels with scope. This extension
2005 is useful when writing code involving statement-expressions. */
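   A hedged sketch of the extension, adapted from the usual example:

     #define SEARCH(array, target)              \
     ({                                         \
        __label__ found;                        \
        int i, value = -1;                      \
        for (i = 0; i < 10; i++)                \
          if ((array)[i] == (target))           \
            { value = i; goto found; }          \
      found:                                    \
        value;                                  \
     })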
2006
2007 void
2008 finish_label_decl (tree name)
2009 {
2010 if (!at_function_scope_p ())
2011 {
2012 error ("%<__label__%> declarations are only allowed in function scopes");
2013 return;
2014 }
2015
2016 add_decl_expr (declare_local_label (name));
2017 }
2018
2019 /* When DECL goes out of scope, make sure that CLEANUP is executed. */
2020
2021 void
2022 finish_decl_cleanup (tree decl, tree cleanup)
2023 {
2024 push_cleanup (decl, cleanup, false);
2025 }
2026
2027 /* If the current scope exits with an exception, run CLEANUP. */
2028
2029 void
2030 finish_eh_cleanup (tree cleanup)
2031 {
2032 push_cleanup (NULL, cleanup, true);
2033 }
2034
2035 /* The MEM_INITS is a list of mem-initializers, in reverse of the
2036 order they were written by the user. Each node is as for
2037 emit_mem_initializers. */
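   For instance (an illustrative sketch), given

     struct S { int a, b; S () : a (1), b (2) { } };

   the list we receive names b's initializer first and a's second; the
   nreverse below restores the source order.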
2038
2039 void
2040 finish_mem_initializers (tree mem_inits)
2041 {
2042 /* Reorder the MEM_INITS so that they are in the order they appeared
2043 in the source program. */
2044 mem_inits = nreverse (mem_inits);
2045
2046 if (processing_template_decl)
2047 {
2048 tree mem;
2049
2050 for (mem = mem_inits; mem; mem = TREE_CHAIN (mem))
2051 {
2052 /* If the TREE_PURPOSE is a TYPE_PACK_EXPANSION, skip the
2053 check for bare parameter packs in the TREE_VALUE, because
2054 any parameter packs in the TREE_VALUE have already been
2055 bound as part of the TREE_PURPOSE. See
2056 make_pack_expansion for more information. */
2057 if (TREE_CODE (TREE_PURPOSE (mem)) != TYPE_PACK_EXPANSION
2058 && check_for_bare_parameter_packs (TREE_VALUE (mem)))
2059 TREE_VALUE (mem) = error_mark_node;
2060 }
2061
2062 add_stmt (build_min_nt_loc (UNKNOWN_LOCATION,
2063 CTOR_INITIALIZER, mem_inits));
2064 }
2065 else
2066 emit_mem_initializers (mem_inits);
2067 }
2068
2069 /* Obfuscate EXPR if it looks like an id-expression or member access so
2070 that the call to finish_decltype in do_auto_deduction will give the
2071 right result. If EVEN_UNEVAL, do this even in unevaluated context. */
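   E.g. (illustration only):

     int x;
     decltype(auto) f () { return x; }    // deduces int
     decltype(auto) g () { return (x); }  // parenthesized: deduces int&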
2072
2073 tree
2074 force_paren_expr (tree expr, bool even_uneval)
2075 {
2076 /* This is only needed for decltype(auto) in C++14. */
2077 if (cxx_dialect < cxx14)
2078 return expr;
2079
2080 /* If we're in unevaluated context, we can't be deducing a
2081 return/initializer type, so we don't need to mess with this. */
2082 if (cp_unevaluated_operand && !even_uneval)
2083 return expr;
2084
2085 if (TREE_CODE (expr) == COMPONENT_REF
2086 || TREE_CODE (expr) == SCOPE_REF
2087 || REFERENCE_REF_P (expr))
2088 REF_PARENTHESIZED_P (expr) = true;
2089 else if (DECL_P (tree_strip_any_location_wrapper (expr)))
2090 {
2091 location_t loc = cp_expr_location (expr);
2092 const tree_code code = processing_template_decl ? PAREN_EXPR
2093 : VIEW_CONVERT_EXPR;
2094 expr = build1_loc (loc, code, TREE_TYPE (expr), expr);
2095 REF_PARENTHESIZED_P (expr) = true;
2096 }
2097 return expr;
2098 }
2099
2100 /* If T is an id-expression obfuscated by force_paren_expr, undo the
2101 obfuscation and return the underlying id-expression. Otherwise
2102 return T. */
2103
2104 tree
2105 maybe_undo_parenthesized_ref (tree t)
2106 {
2107 if (cxx_dialect < cxx14)
2108 return t;
2109
2110 if ((TREE_CODE (t) == PAREN_EXPR || TREE_CODE (t) == VIEW_CONVERT_EXPR)
2111 && REF_PARENTHESIZED_P (t))
2112 t = TREE_OPERAND (t, 0);
2113
2114 return t;
2115 }
2116
2117 /* Finish a parenthesized expression EXPR. */
2118
2119 cp_expr
2120 finish_parenthesized_expr (cp_expr expr)
2121 {
2122 if (EXPR_P (expr))
2123 /* This inhibits warnings in c_common_truthvalue_conversion. */
2124 suppress_warning (expr, OPT_Wparentheses);
2125
2126 if (TREE_CODE (expr) == OFFSET_REF
2127 || TREE_CODE (expr) == SCOPE_REF)
2128 /* [expr.unary.op]/3 The qualified id of a pointer-to-member must not be
2129 enclosed in parentheses. */
2130 PTRMEM_OK_P (expr) = 0;
2131
2132 tree stripped_expr = tree_strip_any_location_wrapper (expr);
2133 if (TREE_CODE (stripped_expr) == STRING_CST)
2134 PAREN_STRING_LITERAL_P (stripped_expr) = 1;
2135
2136 expr = cp_expr (force_paren_expr (expr), expr.get_location ());
2137
2138 return expr;
2139 }
2140
2141 /* Finish a reference to a non-static data member (DECL) that is not
2142 preceded by `.' or `->'. */
2143
2144 tree
2145 finish_non_static_data_member (tree decl, tree object, tree qualifying_scope)
2146 {
2147 gcc_assert (TREE_CODE (decl) == FIELD_DECL);
2148 bool try_omp_private = !object && omp_private_member_map;
2149 tree ret;
2150
2151 if (!object)
2152 {
2153 tree scope = qualifying_scope;
2154 if (scope == NULL_TREE)
2155 {
2156 scope = context_for_name_lookup (decl);
2157 if (!TYPE_P (scope))
2158 {
2159 /* Can happen during error recovery (c++/85014). */
2160 gcc_assert (seen_error ());
2161 return error_mark_node;
2162 }
2163 }
2164 object = maybe_dummy_object (scope, NULL);
2165 }
2166
2167 object = maybe_resolve_dummy (object, true);
2168 if (object == error_mark_node)
2169 return error_mark_node;
2170
2171 /* DR 613/850: Can use non-static data members without an associated
2172 object in sizeof/decltype/alignof. */
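   For instance (an illustrative sketch):

     struct S { int i; };
     auto n = sizeof (S::i);   // OK since C++11: unevaluated operand
     int j = S::i;             // error: no object for the member access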
2173 if (is_dummy_object (object) && cp_unevaluated_operand == 0
2174 && (!processing_template_decl || !current_class_ref))
2175 {
2176 if (current_function_decl
2177 && DECL_STATIC_FUNCTION_P (current_function_decl))
2178 error ("invalid use of member %qD in static member function", decl);
2179 else
2180 error ("invalid use of non-static data member %qD", decl);
2181 inform (DECL_SOURCE_LOCATION (decl), "declared here");
2182
2183 return error_mark_node;
2184 }
2185
2186 if (current_class_ptr)
2187 TREE_USED (current_class_ptr) = 1;
2188 if (processing_template_decl)
2189 {
2190 tree type = TREE_TYPE (decl);
2191
2192 if (TYPE_REF_P (type))
2193 /* Quals on the object don't matter. */;
2194 else if (PACK_EXPANSION_P (type))
2195 /* Don't bother trying to represent this. */
2196 type = NULL_TREE;
2197 else
2198 {
2199 /* Set the cv qualifiers. */
2200 int quals = cp_type_quals (TREE_TYPE (object));
2201
2202 if (DECL_MUTABLE_P (decl))
2203 quals &= ~TYPE_QUAL_CONST;
2204
2205 quals |= cp_type_quals (TREE_TYPE (decl));
2206 type = cp_build_qualified_type (type, quals);
2207 }
2208
2209 if (qualifying_scope)
2210 /* Wrap this in a SCOPE_REF for now. */
2211 ret = build_qualified_name (type, qualifying_scope, decl,
2212 /*template_p=*/false);
2213 else
2214 ret = (convert_from_reference
2215 (build_min (COMPONENT_REF, type, object, decl, NULL_TREE)));
2216 }
2217 /* If PROCESSING_TEMPLATE_DECL is nonzero here, then
2218 QUALIFYING_SCOPE is also non-null. */
2219 else
2220 {
2221 tree access_type = TREE_TYPE (object);
2222
2223 perform_or_defer_access_check (TYPE_BINFO (access_type), decl,
2224 decl, tf_warning_or_error);
2225
2226 /* If the data member was named `C::M', convert `*this' to `C'
2227 first. */
2228 if (qualifying_scope)
2229 {
2230 tree binfo = NULL_TREE;
2231 object = build_scoped_ref (object, qualifying_scope,
2232 &binfo);
2233 }
2234
2235 ret = build_class_member_access_expr (object, decl,
2236 /*access_path=*/NULL_TREE,
2237 /*preserve_reference=*/false,
2238 tf_warning_or_error);
2239 }
2240 if (try_omp_private)
2241 {
2242 tree *v = omp_private_member_map->get (decl);
2243 if (v)
2244 ret = convert_from_reference (*v);
2245 }
2246 return ret;
2247 }
2248
2249 /* DECL was the declaration to which a qualified-id resolved. Issue
2250 an error message if it is not accessible. If OBJECT_TYPE is
2251 non-NULL, we have just seen `x->' or `x.' and OBJECT_TYPE is the
2252 type of `*x', or `x', respectively. If the DECL was named as
2253 `A::B' then NESTED_NAME_SPECIFIER is `A'. Return value is like
2254 perform_access_checks above. */
2255
2256 bool
2257 check_accessibility_of_qualified_id (tree decl,
2258 tree object_type,
2259 tree nested_name_specifier,
2260 tsubst_flags_t complain)
2261 {
2262 /* If we're not checking, return immediately. */
2263 if (deferred_access_no_check)
2264 return true;
2265
2266 /* Determine the SCOPE of DECL. */
2267 tree scope = context_for_name_lookup (decl);
2268 /* If the SCOPE is not a type, then DECL is not a member. */
2269 if (!TYPE_P (scope)
2270 /* If SCOPE is dependent then we can't perform this access check now,
2271 and since we'll perform this access check again after substitution
2272 there's no need to explicitly defer it. */
2273 || dependent_type_p (scope))
2274 return true;
2275
2276 tree qualifying_type = NULL_TREE;
2277 /* Compute the scope through which DECL is being accessed. */
2278 if (object_type
2279 /* OBJECT_TYPE might not be a class type; consider:
2280
2281 class A { typedef int I; };
2282 I *p;
2283 p->A::I::~I();
2284
2285 In this case, we will have "A::I" as the DECL, but "I" as the
2286 OBJECT_TYPE. */
2287 && CLASS_TYPE_P (object_type)
2288 && DERIVED_FROM_P (scope, object_type))
2289 /* If we are processing a `->' or `.' expression, use the type of the
2290 left-hand side. */
2291 qualifying_type = object_type;
2292 else if (nested_name_specifier)
2293 {
2294 /* If the reference is to a non-static member of the
2295 current class, treat it as if it were referenced through
2296 `this'. */
2297 if (DECL_NONSTATIC_MEMBER_P (decl)
2298 && current_class_ptr)
2299 if (tree current = current_nonlambda_class_type ())
2300 {
2301 if (dependent_type_p (current))
2302 /* In general we can't know whether this access goes through
2303 `this' until instantiation time. Punt now, or else we might
2304 create a deferred access check that's not relative to `this'
2305 when it ought to be. We'll check this access again after
2306 substitution, e.g. from tsubst_qualified_id. */
2307 return true;
2308
2309 if (DERIVED_FROM_P (scope, current))
2310 qualifying_type = current;
2311 }
2312 /* Otherwise, use the type indicated by the
2313 nested-name-specifier. */
2314 if (!qualifying_type)
2315 qualifying_type = nested_name_specifier;
2316 }
2317 else
2318 /* Otherwise, the name must be from the current class or one of
2319 its bases. */
2320 qualifying_type = currently_open_derived_class (scope);
2321
2322 if (qualifying_type
2323 /* It is possible for qualifying type to be a TEMPLATE_TYPE_PARM
2324 or similar in a default argument value. */
2325 && CLASS_TYPE_P (qualifying_type))
2326 return perform_or_defer_access_check (TYPE_BINFO (qualifying_type), decl,
2327 decl, complain);
2328
2329 return true;
2330 }
2331
2332 /* EXPR is the result of a qualified-id. The QUALIFYING_CLASS was the
2333 class named to the left of the "::" operator. DONE is true if this
2334 expression is a complete postfix-expression; it is false if this
2335 expression is followed by '->', '[', '(', etc. ADDRESS_P is true
2336 iff this expression is the operand of '&'. TEMPLATE_P is true iff
2337 the qualified-id was of the form "A::template B". TEMPLATE_ARG_P
2338 is true iff this qualified name appears as a template argument. */
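   Illustrative examples of the main cases handled below (a sketch only):

     struct A { int i; void f (); };

     int A::*pm = &A::i;          // ADDRESS_P && DONE: pointer-to-member
     void A::f () { A::i = 3; }   // FIELD_DECL: rewritten as this->A::i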
2339
2340 tree
2341 finish_qualified_id_expr (tree qualifying_class,
2342 tree expr,
2343 bool done,
2344 bool address_p,
2345 bool template_p,
2346 bool template_arg_p,
2347 tsubst_flags_t complain)
2348 {
2349 gcc_assert (TYPE_P (qualifying_class));
2350
2351 if (error_operand_p (expr))
2352 return error_mark_node;
2353
2354 if (DECL_P (expr)
2355 /* Functions are marked after overload resolution; avoid redundant
2356 warnings. */
2357 && TREE_CODE (expr) != FUNCTION_DECL
2358 && !mark_used (expr, complain))
2359 return error_mark_node;
2360
2361 if (template_p)
2362 {
2363 if (TREE_CODE (expr) == UNBOUND_CLASS_TEMPLATE)
2364 {
2365 /* cp_parser_lookup_name thought we were looking for a type,
2366 but we're actually looking for a declaration. */
2367 qualifying_class = TYPE_CONTEXT (expr);
2368 expr = TYPE_IDENTIFIER (expr);
2369 }
2370 else
2371 check_template_keyword (expr);
2372 }
2373
2374 /* If EXPR occurs as the operand of '&', use special handling that
2375 permits a pointer-to-member. */
2376 if (address_p && done
2377 && TREE_CODE (qualifying_class) != ENUMERAL_TYPE)
2378 {
2379 if (TREE_CODE (expr) == SCOPE_REF)
2380 expr = TREE_OPERAND (expr, 1);
2381 expr = build_offset_ref (qualifying_class, expr,
2382 /*address_p=*/true, complain);
2383 return expr;
2384 }
2385
2386 /* No need to check access within an enum. */
2387 if (TREE_CODE (qualifying_class) == ENUMERAL_TYPE
2388 && TREE_CODE (expr) != IDENTIFIER_NODE)
2389 return expr;
2390
2391 /* Within the scope of a class, turn references to non-static
2392 members into expressions of the form "this->...". */
2393 if (template_arg_p)
2394 /* But, within a template argument, we do not want to make the
2395 transformation, as there is no "this" pointer. */
2396 ;
2397 else if (TREE_CODE (expr) == FIELD_DECL)
2398 {
2399 push_deferring_access_checks (dk_no_check);
2400 expr = finish_non_static_data_member (expr, NULL_TREE,
2401 qualifying_class);
2402 pop_deferring_access_checks ();
2403 }
2404 else if (BASELINK_P (expr))
2405 {
2406 /* See if any of the functions are non-static members. */
2407 /* If so, the expression may be relative to 'this'. */
2408 if (!shared_member_p (expr)
2409 && current_class_ptr
2410 && DERIVED_FROM_P (qualifying_class,
2411 current_nonlambda_class_type ()))
2412 expr = (build_class_member_access_expr
2413 (maybe_dummy_object (qualifying_class, NULL),
2414 expr,
2415 BASELINK_ACCESS_BINFO (expr),
2416 /*preserve_reference=*/false,
2417 complain));
2418 else if (done)
2419 /* The expression is a qualified name whose address is not
2420 being taken. */
2421 expr = build_offset_ref (qualifying_class, expr, /*address_p=*/false,
2422 complain);
2423 }
2424 else if (!template_p
2425 && TREE_CODE (expr) == TEMPLATE_DECL
2426 && !DECL_FUNCTION_TEMPLATE_P (expr))
2427 {
2428 if (complain & tf_error)
2429 error ("%qE missing template arguments", expr);
2430 return error_mark_node;
2431 }
2432 else
2433 {
2434 /* In a template, return a SCOPE_REF for most qualified-ids
2435 so that we can check access at instantiation time. But if
2436 we're looking at a member of the current instantiation, we
2437 know we have access and building up the SCOPE_REF confuses
2438 non-type template argument handling. */
2439 if (processing_template_decl
2440 && (!currently_open_class (qualifying_class)
2441 || TREE_CODE (expr) == IDENTIFIER_NODE
2442 || TREE_CODE (expr) == TEMPLATE_ID_EXPR
2443 || TREE_CODE (expr) == BIT_NOT_EXPR))
2444 expr = build_qualified_name (TREE_TYPE (expr),
2445 qualifying_class, expr,
2446 template_p);
2447 else if (tree wrap = maybe_get_tls_wrapper_call (expr))
2448 expr = wrap;
2449
2450 expr = convert_from_reference (expr);
2451 }
2452
2453 return expr;
2454 }
2455
2456 /* Begin a statement-expression. The value returned must be passed to
2457 finish_stmt_expr. */
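   Statement-expressions are the GNU extension

     int y = ({ int t = f (); t * 2; });

   where the value of the last expression statement is the value of the
   whole parenthesized block (f is a hypothetical function here).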
2458
2459 tree
2460 begin_stmt_expr (void)
2461 {
2462 return push_stmt_list ();
2463 }
2464
2465 /* Process the final expression of a statement expression. EXPR can be
2466 NULL, if the final expression is empty. Return a STATEMENT_LIST
2467 containing all the statements in the statement-expression, or
2468 ERROR_MARK_NODE if there was an error. */
2469
2470 tree
2471 finish_stmt_expr_expr (tree expr, tree stmt_expr)
2472 {
2473 if (error_operand_p (expr))
2474 {
2475 /* The type of the statement-expression is the type of the last
2476 expression. */
2477 TREE_TYPE (stmt_expr) = error_mark_node;
2478 return error_mark_node;
2479 }
2480
2481 /* If the last statement does not have "void" type, then the value
2482 of the last statement is the value of the entire expression. */
2483 if (expr)
2484 {
2485 tree type = TREE_TYPE (expr);
2486
2487 if (type && type_unknown_p (type))
2488 {
2489 error ("a statement expression is an insufficient context"
2490 " for overload resolution");
2491 TREE_TYPE (stmt_expr) = error_mark_node;
2492 return error_mark_node;
2493 }
2494 else if (processing_template_decl)
2495 {
2496 expr = build_stmt (input_location, EXPR_STMT, expr);
2497 expr = add_stmt (expr);
2498 /* Mark the last statement so that we can recognize it as such at
2499 template-instantiation time. */
2500 EXPR_STMT_STMT_EXPR_RESULT (expr) = 1;
2501 }
2502 else if (VOID_TYPE_P (type))
2503 {
2504 /* Just treat this like an ordinary statement. */
2505 expr = finish_expr_stmt (expr);
2506 }
2507 else
2508 {
2509 /* It actually has a value we need to deal with. First, force it
2510 to be an rvalue so that we won't need to build up a copy
2511 constructor call later when we try to assign it to something. */
2512 expr = force_rvalue (expr, tf_warning_or_error);
2513 if (error_operand_p (expr))
2514 return error_mark_node;
2515
2516 /* Update for array-to-pointer decay. */
2517 type = TREE_TYPE (expr);
2518
2519 /* Wrap it in a CLEANUP_POINT_EXPR and add it to the list like a
2520 normal statement, but don't convert to void or actually add
2521 the EXPR_STMT. */
2522 if (TREE_CODE (expr) != CLEANUP_POINT_EXPR)
2523 expr = maybe_cleanup_point_expr (expr);
2524 add_stmt (expr);
2525 }
2526
2527 /* The type of the statement-expression is the type of the last
2528 expression. */
2529 TREE_TYPE (stmt_expr) = type;
2530 }
2531
2532 return stmt_expr;
2533 }
2534
2535 /* Finish a statement-expression. EXPR should be the value returned
2536 by the previous begin_stmt_expr. Returns an expression
2537 representing the statement-expression. */
2538
2539 tree
2540 finish_stmt_expr (tree stmt_expr, bool has_no_scope)
2541 {
2542 tree type;
2543 tree result;
2544
2545 if (error_operand_p (stmt_expr))
2546 {
2547 pop_stmt_list (stmt_expr);
2548 return error_mark_node;
2549 }
2550
2551 gcc_assert (TREE_CODE (stmt_expr) == STATEMENT_LIST);
2552
2553 type = TREE_TYPE (stmt_expr);
2554 result = pop_stmt_list (stmt_expr);
2555 TREE_TYPE (result) = type;
2556
2557 if (processing_template_decl)
2558 {
2559 result = build_min (STMT_EXPR, type, result);
2560 TREE_SIDE_EFFECTS (result) = 1;
2561 STMT_EXPR_NO_SCOPE (result) = has_no_scope;
2562 }
2563 else if (CLASS_TYPE_P (type))
2564 {
2565 /* Wrap the statement-expression in a TARGET_EXPR so that the
2566 temporary object created by the final expression is destroyed at
2567 the end of the full-expression containing the
2568 statement-expression. */
2569 result = force_target_expr (type, result, tf_warning_or_error);
2570 }
2571
2572 return result;
2573 }
2574
2575 /* Returns the expression which provides the value of STMT_EXPR. */
2576
2577 tree
2578 stmt_expr_value_expr (tree stmt_expr)
2579 {
2580 tree t = STMT_EXPR_STMT (stmt_expr);
2581
2582 if (TREE_CODE (t) == BIND_EXPR)
2583 t = BIND_EXPR_BODY (t);
2584
2585 if (TREE_CODE (t) == STATEMENT_LIST && STATEMENT_LIST_TAIL (t))
2586 t = STATEMENT_LIST_TAIL (t)->stmt;
2587
2588 if (TREE_CODE (t) == EXPR_STMT)
2589 t = EXPR_STMT_EXPR (t);
2590
2591 return t;
2592 }
2593
2594 /* Return TRUE iff EXPR_STMT is an empty list of
2595 expression statements. */
2596
2597 bool
2598 empty_expr_stmt_p (tree expr_stmt)
2599 {
2600 tree body = NULL_TREE;
2601
2602 if (expr_stmt == void_node)
2603 return true;
2604
2605 if (expr_stmt)
2606 {
2607 if (TREE_CODE (expr_stmt) == EXPR_STMT)
2608 body = EXPR_STMT_EXPR (expr_stmt);
2609 else if (TREE_CODE (expr_stmt) == STATEMENT_LIST)
2610 body = expr_stmt;
2611 }
2612
2613 if (body)
2614 {
2615 if (TREE_CODE (body) == STATEMENT_LIST)
2616 return tsi_end_p (tsi_start (body));
2617 else
2618 return empty_expr_stmt_p (body);
2619 }
2620 return false;
2621 }
2622
2623 /* Perform Koenig lookup. FN_EXPR is the postfix-expression representing
2624 the function (or functions) to call; ARGS are the arguments to the
2625 call. Returns the functions to be considered by overload resolution. */
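   E.g. (illustration only):

     namespace N { struct S { }; void f (S); }
     void g () { N::S s; f (s); }   // f found via argument-dependent lookup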
2626
2627 cp_expr
2628 perform_koenig_lookup (cp_expr fn_expr, vec<tree, va_gc> *args,
2629 tsubst_flags_t complain)
2630 {
2631 tree identifier = NULL_TREE;
2632 tree functions = NULL_TREE;
2633 tree tmpl_args = NULL_TREE;
2634 bool template_id = false;
2635 location_t loc = fn_expr.get_location ();
2636 tree fn = fn_expr.get_value ();
2637
2638 STRIP_ANY_LOCATION_WRAPPER (fn);
2639
2640 if (TREE_CODE (fn) == TEMPLATE_ID_EXPR)
2641 {
2642 /* Use a separate flag to handle null args. */
2643 template_id = true;
2644 tmpl_args = TREE_OPERAND (fn, 1);
2645 fn = TREE_OPERAND (fn, 0);
2646 }
2647
2648 /* Find the name of the overloaded function. */
2649 if (identifier_p (fn))
2650 identifier = fn;
2651 else
2652 {
2653 functions = fn;
2654 identifier = OVL_NAME (functions);
2655 }
2656
2657 /* A call to a namespace-scope function using an unqualified name.
2658
2659 Do Koenig lookup -- unless any of the arguments are
2660 type-dependent. */
2661 if (!any_type_dependent_arguments_p (args)
2662 && !any_dependent_template_arguments_p (tmpl_args))
2663 {
2664 fn = lookup_arg_dependent (identifier, functions, args);
2665 if (!fn)
2666 {
2667 /* The unqualified name could not be resolved. */
2668 if (complain & tf_error)
2669 fn = unqualified_fn_lookup_error (cp_expr (identifier, loc));
2670 else
2671 fn = identifier;
2672 }
2673 }
2674
2675 if (fn && template_id && fn != error_mark_node)
2676 fn = build2 (TEMPLATE_ID_EXPR, unknown_type_node, fn, tmpl_args);
2677
2678 return cp_expr (fn, loc);
2679 }
2680
2681 /* Generate an expression for `FN (ARGS)'. This may change the
2682 contents of ARGS.
2683
2684 If DISALLOW_VIRTUAL is true, the call to FN will be not generated
2685 as a virtual call, even if FN is virtual. (This flag is set when
2686 encountering an expression where the function name is explicitly
2687 qualified. For example a call to `X::f' never generates a virtual
2688 call.)
2689
2690 Returns code for the call. */
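   For instance (a sketch):

     struct B { virtual void f (); };
     struct D : B { void f () override; };
     void g (D &d) { d.B::f (); }   // qualified name: never a virtual call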
2691
2692 tree
2693 finish_call_expr (tree fn, vec<tree, va_gc> **args, bool disallow_virtual,
2694 bool koenig_p, tsubst_flags_t complain)
2695 {
2696 tree result;
2697 tree orig_fn;
2698 vec<tree, va_gc> *orig_args = *args;
2699
2700 if (fn == error_mark_node)
2701 return error_mark_node;
2702
2703 gcc_assert (!TYPE_P (fn));
2704
2705 /* If FN may be a FUNCTION_DECL obfuscated by force_paren_expr, undo
2706 it so that we can tell this is a call to a known function. */
2707 fn = maybe_undo_parenthesized_ref (fn);
2708
2709 STRIP_ANY_LOCATION_WRAPPER (fn);
2710
2711 orig_fn = fn;
2712
2713 if (processing_template_decl)
2714 {
2715 /* If FN is a local extern declaration (or set thereof) in a template,
2716 look it up again at instantiation time. */
2717 if (is_overloaded_fn (fn))
2718 {
2719 tree ifn = get_first_fn (fn);
2720 if (TREE_CODE (ifn) == FUNCTION_DECL
2721 && dependent_local_decl_p (ifn))
2722 orig_fn = DECL_NAME (ifn);
2723 }
2724
2725 /* If the call expression is dependent, build a CALL_EXPR node
2726 with no type; type_dependent_expression_p recognizes
2727 expressions with no type as being dependent. */
2728 if (type_dependent_expression_p (fn)
2729 || any_type_dependent_arguments_p (*args))
2730 {
2731 result = build_min_nt_call_vec (orig_fn, *args);
2732 SET_EXPR_LOCATION (result, cp_expr_loc_or_input_loc (fn));
2733 KOENIG_LOOKUP_P (result) = koenig_p;
2734 if (is_overloaded_fn (fn))
2735 fn = get_fns (fn);
2736
2737 if (cfun)
2738 {
2739 bool abnormal = true;
2740 for (lkp_iterator iter (fn); abnormal && iter; ++iter)
2741 {
2742 tree fndecl = STRIP_TEMPLATE (*iter);
2743 if (TREE_CODE (fndecl) != FUNCTION_DECL
2744 || !TREE_THIS_VOLATILE (fndecl))
2745 abnormal = false;
2746 }
2747 /* FIXME: Stop warning about falling off end of non-void
2748 function. But this is wrong. Even if we only see
2749 no-return fns at this point, we could select a
2750 future-defined return fn during instantiation. Or
2751 vice-versa. */
2752 if (abnormal)
2753 current_function_returns_abnormally = 1;
2754 }
2755 return result;
2756 }
2757 orig_args = make_tree_vector_copy (*args);
2758 if (!BASELINK_P (fn)
2759 && TREE_CODE (fn) != PSEUDO_DTOR_EXPR
2760 && TREE_TYPE (fn) != unknown_type_node)
2761 fn = build_non_dependent_expr (fn);
2762 make_args_non_dependent (*args);
2763 }
2764
2765 if (TREE_CODE (fn) == COMPONENT_REF)
2766 {
2767 tree member = TREE_OPERAND (fn, 1);
2768 if (BASELINK_P (member))
2769 {
2770 tree object = TREE_OPERAND (fn, 0);
2771 return build_new_method_call (object, member,
2772 args, NULL_TREE,
2773 (disallow_virtual
2774 ? LOOKUP_NORMAL | LOOKUP_NONVIRTUAL
2775 : LOOKUP_NORMAL),
2776 /*fn_p=*/NULL,
2777 complain);
2778 }
2779 }
2780
2781 /* Per 13.3.1.1, '(&f)(...)' is the same as '(f)(...)'. */
2782 if (TREE_CODE (fn) == ADDR_EXPR
2783 && TREE_CODE (TREE_OPERAND (fn, 0)) == OVERLOAD)
2784 fn = TREE_OPERAND (fn, 0);
2785
2786 if (is_overloaded_fn (fn))
2787 fn = baselink_for_fns (fn);
2788
2789 result = NULL_TREE;
2790 if (BASELINK_P (fn))
2791 {
2792 tree object;
2793
2794 /* A call to a member function. From [over.call.func]:
2795
2796 If the keyword this is in scope and refers to the class of
2797 that member function, or a derived class thereof, then the
2798 function call is transformed into a qualified function call
2799 using (*this) as the postfix-expression to the left of the
2800 . operator.... [Otherwise] a contrived object of type T
2801 becomes the implied object argument.
2802
2803 In this situation:
2804
2805 struct A { void f(); };
2806 struct B : public A {};
2807 struct C : public A { void g() { B::f(); }};
2808
2809 "the class of that member function" refers to `A'. But 11.2
2810 [class.access.base] says that we need to convert 'this' to B* as
2811 part of the access, so we pass 'B' to maybe_dummy_object. */
2812
2813 if (DECL_MAYBE_IN_CHARGE_CONSTRUCTOR_P (get_first_fn (fn)))
2814 {
2815 /* A constructor call always uses a dummy object. (This constructor
2816 call which has the form A::A () is actually invalid and we are
2817 going to reject it later in build_new_method_call.) */
2818 object = build_dummy_object (BINFO_TYPE (BASELINK_ACCESS_BINFO (fn)));
2819 }
2820 else
2821 object = maybe_dummy_object (BINFO_TYPE (BASELINK_ACCESS_BINFO (fn)),
2822 NULL);
2823
2824 result = build_new_method_call (object, fn, args, NULL_TREE,
2825 (disallow_virtual
2826 ? LOOKUP_NORMAL|LOOKUP_NONVIRTUAL
2827 : LOOKUP_NORMAL),
2828 /*fn_p=*/NULL,
2829 complain);
2830 }
2831 else if (concept_check_p (fn))
2832 {
2833 /* FN is actually a template-id referring to a concept definition. */
2834 tree id = unpack_concept_check (fn);
2835 tree tmpl = TREE_OPERAND (id, 0);
2836 tree args = TREE_OPERAND (id, 1);
2837
2838 if (!function_concept_p (tmpl))
2839 {
2840 error_at (EXPR_LOC_OR_LOC (fn, input_location),
2841 "cannot call a concept as a function");
2842 return error_mark_node;
2843 }
2844
2845 /* Ensure the result is wrapped as a call expression. */
2846 result = build_concept_check (tmpl, args, tf_warning_or_error);
2847 }
2848 else if (is_overloaded_fn (fn))
2849 {
2850 /* If the function is an overloaded builtin, resolve it. */
2851 if (TREE_CODE (fn) == FUNCTION_DECL
2852 && (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
2853 || DECL_BUILT_IN_CLASS (fn) == BUILT_IN_MD))
2854 result = resolve_overloaded_builtin (input_location, fn, *args);
2855
2856 if (!result)
2857 {
2858 if (warn_sizeof_pointer_memaccess
2859 && (complain & tf_warning)
2860 && !vec_safe_is_empty (*args)
2861 && !processing_template_decl)
2862 {
2863 location_t sizeof_arg_loc[3];
2864 tree sizeof_arg[3];
2865 unsigned int i;
2866 for (i = 0; i < 3; i++)
2867 {
2868 tree t;
2869
2870 sizeof_arg_loc[i] = UNKNOWN_LOCATION;
2871 sizeof_arg[i] = NULL_TREE;
2872 if (i >= (*args)->length ())
2873 continue;
2874 t = (**args)[i];
2875 if (TREE_CODE (t) != SIZEOF_EXPR)
2876 continue;
2877 if (SIZEOF_EXPR_TYPE_P (t))
2878 sizeof_arg[i] = TREE_TYPE (TREE_OPERAND (t, 0));
2879 else
2880 sizeof_arg[i] = TREE_OPERAND (t, 0);
2881 sizeof_arg_loc[i] = EXPR_LOCATION (t);
2882 }
2883 sizeof_pointer_memaccess_warning
2884 (sizeof_arg_loc, fn, *args,
2885 sizeof_arg, same_type_ignoring_top_level_qualifiers_p);
2886 }
2887
2888 if ((complain & tf_warning)
2889 && TREE_CODE (fn) == FUNCTION_DECL
2890 && fndecl_built_in_p (fn, BUILT_IN_MEMSET)
2891 && vec_safe_length (*args) == 3
2892 && !any_type_dependent_arguments_p (*args))
2893 {
2894 tree arg0 = (*orig_args)[0];
2895 tree arg1 = (*orig_args)[1];
2896 tree arg2 = (*orig_args)[2];
2897 int literal_mask = ((literal_integer_zerop (arg1) << 1)
2898 | (literal_integer_zerop (arg2) << 2));
2899 warn_for_memset (input_location, arg0, arg2, literal_mask);
2900 }
2901
2902 /* A call to a namespace-scope function. */
2903 result = build_new_function_call (fn, args, complain);
2904 }
2905 }
2906 else if (TREE_CODE (fn) == PSEUDO_DTOR_EXPR)
2907 {
2908 if (!vec_safe_is_empty (*args))
2909 error ("arguments to destructor are not allowed");
2910 /* C++20/DR: If the postfix-expression names a pseudo-destructor (in
2911 which case the postfix-expression is a possibly-parenthesized class
2912 member access), the function call destroys the object of scalar type
2913 denoted by the object expression of the class member access. */
2914 tree ob = TREE_OPERAND (fn, 0);
2915 if (obvalue_p (ob))
2916 result = build_trivial_dtor_call (ob, true);
2917 else
2918 /* No location to clobber. */
2919 result = convert_to_void (ob, ICV_STATEMENT, complain);
2920 }
2921 else if (CLASS_TYPE_P (TREE_TYPE (fn)))
2922 /* If the "function" is really an object of class type, it might
2923 have an overloaded `operator ()'. */
2924 result = build_op_call (fn, args, complain);
2925
2926 if (!result)
2927 /* A call where the function is unknown. */
2928 result = cp_build_function_call_vec (fn, args, complain);
2929
2930 if (processing_template_decl && result != error_mark_node)
2931 {
2932 if (INDIRECT_REF_P (result))
2933 result = TREE_OPERAND (result, 0);
2934
2935 /* Prune all but the selected function from the original overload
2936 set so that we can avoid some duplicate work at instantiation time. */
2937 if (TREE_CODE (result) == CALL_EXPR
2938 && really_overloaded_fn (orig_fn))
2939 {
2940 tree sel_fn = CALL_EXPR_FN (result);
2941 if (TREE_CODE (sel_fn) == COMPONENT_REF)
2942 {
2943 /* The non-dependent result of build_new_method_call. */
2944 sel_fn = TREE_OPERAND (sel_fn, 1);
2945 gcc_assert (BASELINK_P (sel_fn));
2946 }
2947 else if (TREE_CODE (sel_fn) == ADDR_EXPR)
2948 /* Our original callee wasn't wrapped in an ADDR_EXPR,
2949 so strip this ADDR_EXPR added by build_over_call. */
2950 sel_fn = TREE_OPERAND (sel_fn, 0);
2951 orig_fn = sel_fn;
2952 }
2953
2954 result = build_call_vec (TREE_TYPE (result), orig_fn, orig_args);
2955 SET_EXPR_LOCATION (result, input_location);
2956 KOENIG_LOOKUP_P (result) = koenig_p;
2957 release_tree_vector (orig_args);
2958 result = convert_from_reference (result);
2959 }
2960
2961 return result;
2962 }
2963
2964 /* Finish a postfix increment or decrement of EXPR. (Which one is
2965 indicated by CODE, which should be POSTINCREMENT_EXPR or
2966 POSTDECREMENT_EXPR.) */
2967
2968 cp_expr
2969 finish_increment_expr (cp_expr expr, enum tree_code code)
2970 {
2971 /* input_location holds the location of the trailing operator token.
2972 Build a location of the form:
2973 expr++
2974 ~~~~^~
2975 with the caret at the operator token, ranging from the start
2976 of EXPR to the end of the operator token. */
2977 location_t combined_loc = make_location (input_location,
2978 expr.get_start (),
2979 get_finish (input_location));
2980 cp_expr result = build_x_unary_op (combined_loc, code, expr,
2981 NULL_TREE, tf_warning_or_error);
2982 /* TODO: build_x_unary_op doesn't honor the location, so set it here. */
2983 result.set_location (combined_loc);
2984 return result;
2985 }
2986
2987 /* Finish a use of `this'. Returns an expression for `this'. */
2988
2989 tree
2990 finish_this_expr (void)
2991 {
2992 tree result = NULL_TREE;
2993
2994 if (current_class_ptr)
2995 {
2996 tree type = TREE_TYPE (current_class_ref);
2997
2998 /* In a lambda expression, 'this' refers to the captured 'this'. */
2999 if (LAMBDA_TYPE_P (type))
3000 result = lambda_expr_this_capture (CLASSTYPE_LAMBDA_EXPR (type), true);
3001 else
3002 result = current_class_ptr;
3003 }
3004
3005 if (result)
3006 /* The keyword 'this' is a prvalue expression. */
3007 return rvalue (result);
3008
3009 tree fn = current_nonlambda_function ();
3010 if (fn && DECL_STATIC_FUNCTION_P (fn))
3011 error ("%<this%> is unavailable for static member functions");
3012 else if (fn)
3013 error ("invalid use of %<this%> in non-member function");
3014 else
3015 error ("invalid use of %<this%> at top level");
3016 return error_mark_node;
3017 }
3018
3019 /* Finish a pseudo-destructor expression. If SCOPE is NULL, the
3020 expression was of the form `OBJECT.~DESTRUCTOR' where DESTRUCTOR is
3021 the TYPE for the type given. If SCOPE is non-NULL, the expression
3022 was of the form `OBJECT.SCOPE::~DESTRUCTOR'. */
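   E.g. (illustrative only):

     typedef int I;
     void f (I *p)
     {
       p->~I ();      // SCOPE is NULL
       p->I::~I ();   // SCOPE is `I'
     }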
3023
3024 tree
3025 finish_pseudo_destructor_expr (tree object, tree scope, tree destructor,
3026 location_t loc)
3027 {
3028 if (object == error_mark_node || destructor == error_mark_node)
3029 return error_mark_node;
3030
3031 gcc_assert (TYPE_P (destructor));
3032
3033 if (!processing_template_decl)
3034 {
3035 if (scope == error_mark_node)
3036 {
3037 error_at (loc, "invalid qualifying scope in pseudo-destructor name");
3038 return error_mark_node;
3039 }
3040 if (is_auto (destructor))
3041 destructor = TREE_TYPE (object);
3042 if (scope && TYPE_P (scope) && !check_dtor_name (scope, destructor))
3043 {
3044 error_at (loc,
3045 "qualified type %qT does not match destructor name ~%qT",
3046 scope, destructor);
3047 return error_mark_node;
3048 }
3049
3050
3051 /* [expr.pseudo] says both:
3052
3053 The type designated by the pseudo-destructor-name shall be
3054 the same as the object type.
3055
3056 and:
3057
3058 The cv-unqualified versions of the object type and of the
3059 type designated by the pseudo-destructor-name shall be the
3060 same type.
3061
3062 We implement the more generous second sentence, since that is
3063 what most other compilers do. */
3064 if (!same_type_ignoring_top_level_qualifiers_p (TREE_TYPE (object),
3065 destructor))
3066 {
3067 error_at (loc, "%qE is not of type %qT", object, destructor);
3068 return error_mark_node;
3069 }
3070 }
3071
3072 tree type = (type_dependent_expression_p (object)
3073 ? NULL_TREE : void_type_node);
3074
3075 return build3_loc (loc, PSEUDO_DTOR_EXPR, type, object,
3076 scope, destructor);
3077 }
3078
3079 /* Finish an expression of the form CODE EXPR. */
3080
3081 cp_expr
3082 finish_unary_op_expr (location_t op_loc, enum tree_code code, cp_expr expr,
3083 tsubst_flags_t complain)
3084 {
3085 /* Build a location of the form:
3086 ++expr
3087 ^~~~~~
3088 with the caret at the operator token, ranging from the start
3089 of the operator token to the end of EXPR. */
3090 location_t combined_loc = make_location (op_loc,
3091 op_loc, expr.get_finish ());
3092 cp_expr result = build_x_unary_op (combined_loc, code, expr,
3093 NULL_TREE, complain);
3094 /* TODO: build_x_unary_op doesn't always honor the location. */
3095 result.set_location (combined_loc);
3096
3097 if (result == error_mark_node)
3098 return result;
3099
3100 if (!(complain & tf_warning))
3101 return result;
3102
3103 tree result_ovl = result;
3104 tree expr_ovl = expr;
3105
3106 if (!processing_template_decl)
3107 expr_ovl = cp_fully_fold (expr_ovl);
3108
3109 if (!CONSTANT_CLASS_P (expr_ovl)
3110 || TREE_OVERFLOW_P (expr_ovl))
3111 return result;
3112
3113 if (!processing_template_decl)
3114 result_ovl = cp_fully_fold (result_ovl);
3115
3116 if (CONSTANT_CLASS_P (result_ovl) && TREE_OVERFLOW_P (result_ovl))
3117 overflow_warning (combined_loc, result_ovl);
3118
3119 return result;
3120 }
3121
3122 /* Return true if CONSTRUCTOR EXPR after pack expansion could have no
3123 elements. */
3124
3125 static bool
3126 maybe_zero_constructor_nelts (tree expr)
3127 {
3128 if (CONSTRUCTOR_NELTS (expr) == 0)
3129 return true;
3130 if (!processing_template_decl)
3131 return false;
3132 for (constructor_elt &elt : CONSTRUCTOR_ELTS (expr))
3133 if (!PACK_EXPANSION_P (elt.value))
3134 return false;
3135 return true;
3136 }
3137
3138 /* Finish a compound-literal expression or C++11 functional cast with aggregate
3139 initializer. TYPE is the type to which the CONSTRUCTOR in COMPOUND_LITERAL
3140 is being cast. */
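   For instance (an illustrative sketch):

     struct P { int x, y; };
     P p = (P){ 1, 2 };             // C99-style compound literal (fcl_c99)
     void g (P);
     void h () { g (P{ 1, 2 }); }   // functional cast with aggregate init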
3141
3142 tree
3143 finish_compound_literal (tree type, tree compound_literal,
3144 tsubst_flags_t complain,
3145 fcl_t fcl_context)
3146 {
3147 if (type == error_mark_node)
3148 return error_mark_node;
3149
3150 if (TYPE_REF_P (type))
3151 {
3152 compound_literal
3153 = finish_compound_literal (TREE_TYPE (type), compound_literal,
3154 complain, fcl_context);
3155 /* The prvalue is then used to direct-initialize the reference. */
3156 tree r = (perform_implicit_conversion_flags
3157 (type, compound_literal, complain, LOOKUP_NORMAL));
3158 return convert_from_reference (r);
3159 }
3160
3161 if (!TYPE_OBJ_P (type))
3162 {
3163 /* DR2351 */
3164 if (VOID_TYPE_P (type) && CONSTRUCTOR_NELTS (compound_literal) == 0)
3165 return void_node;
3166 else if (VOID_TYPE_P (type)
3167 && processing_template_decl
3168 && maybe_zero_constructor_nelts (compound_literal))
3169 /* If there are only packs in compound_literal, it could
3170 be void{} after pack expansion. */;
3171 else
3172 {
3173 if (complain & tf_error)
3174 error ("compound literal of non-object type %qT", type);
3175 return error_mark_node;
3176 }
3177 }
3178
3179 if (template_placeholder_p (type))
3180 {
3181 type = do_auto_deduction (type, compound_literal, type, complain,
3182 adc_variable_type);
3183 if (type == error_mark_node)
3184 return error_mark_node;
3185 }
3186 /* C++23 auto{x}. */
3187 else if (is_auto (type)
3188 && !AUTO_IS_DECLTYPE (type)
3189 && CONSTRUCTOR_NELTS (compound_literal) == 1)
3190 {
3191 if (is_constrained_auto (type))
3192 {
3193 if (complain & tf_error)
3194 error ("%<auto{x}%> cannot be constrained");
3195 return error_mark_node;
3196 }
3197 else if (cxx_dialect < cxx23)
3198 pedwarn (input_location, OPT_Wc__23_extensions,
3199 "%<auto{x}%> only available with "
3200 "%<-std=c++2b%> or %<-std=gnu++2b%>");
3201 type = do_auto_deduction (type, compound_literal, type, complain,
3202 adc_variable_type);
3203 if (type == error_mark_node)
3204 return error_mark_node;
3205 }
3206
3207 /* Used to hold a copy of the compound literal in a template. */
3208 tree orig_cl = NULL_TREE;
3209
3210 if (processing_template_decl)
3211 {
3212 const bool dependent_p
3213 = (instantiation_dependent_expression_p (compound_literal)
3214 || dependent_type_p (type));
3215 if (dependent_p)
3216 /* We're about to return, no need to copy. */
3217 orig_cl = compound_literal;
3218 else
3219 /* We're going to need a copy. */
3220 orig_cl = unshare_constructor (compound_literal);
3221 TREE_TYPE (orig_cl) = type;
3222 /* Mark the expression as a compound literal. */
3223 TREE_HAS_CONSTRUCTOR (orig_cl) = 1;
3224 /* And as instantiation-dependent. */
3225 CONSTRUCTOR_IS_DEPENDENT (orig_cl) = dependent_p;
3226 if (fcl_context == fcl_c99)
3227 CONSTRUCTOR_C99_COMPOUND_LITERAL (orig_cl) = 1;
3228 /* If the compound literal is dependent, we're done for now. */
3229 if (dependent_p)
3230 return orig_cl;
3231 /* Otherwise, do go on to e.g. check narrowing. */
3232 }
3233
3234 type = complete_type (type);
3235
3236 if (TYPE_NON_AGGREGATE_CLASS (type))
3237 {
3238 /* Trying to deal with a CONSTRUCTOR instead of a TREE_LIST
3239 everywhere that deals with function arguments would be a pain, so
3240 just wrap it in a TREE_LIST. The parser set a flag so we know
3241 that it came from T{} rather than T({}). */
3242 CONSTRUCTOR_IS_DIRECT_INIT (compound_literal) = 1;
3243 compound_literal = build_tree_list (NULL_TREE, compound_literal);
3244 return build_functional_cast (input_location, type,
3245 compound_literal, complain);
3246 }
3247
3248 if (TREE_CODE (type) == ARRAY_TYPE
3249 && check_array_initializer (NULL_TREE, type, compound_literal))
3250 return error_mark_node;
3251 compound_literal = reshape_init (type, compound_literal, complain);
3252 if (SCALAR_TYPE_P (type)
3253 && !BRACE_ENCLOSED_INITIALIZER_P (compound_literal)
3254 && !check_narrowing (type, compound_literal, complain))
3255 return error_mark_node;
3256 if (TREE_CODE (type) == ARRAY_TYPE
3257 && TYPE_DOMAIN (type) == NULL_TREE)
3258 {
3259 cp_complete_array_type_or_error (&type, compound_literal,
3260 false, complain);
3261 if (type == error_mark_node)
3262 return error_mark_node;
3263 }
3264 compound_literal = digest_init_flags (type, compound_literal,
3265 LOOKUP_NORMAL | LOOKUP_NO_NARROWING,
3266 complain);
3267 if (compound_literal == error_mark_node)
3268 return error_mark_node;
3269
3270 /* If we're in a template, return the original compound literal. */
3271 if (orig_cl)
3272 return orig_cl;
3273
3274 if (TREE_CODE (compound_literal) == CONSTRUCTOR)
3275 {
3276 TREE_HAS_CONSTRUCTOR (compound_literal) = true;
3277 if (fcl_context == fcl_c99)
3278 CONSTRUCTOR_C99_COMPOUND_LITERAL (compound_literal) = 1;
3279 }
3280
3281 /* Put static/constant array temporaries in static variables. */
3282 /* FIXME all C99 compound literals should be variables rather than C++
3283 temporaries, unless they are used as an aggregate initializer. */
3284 if ((!at_function_scope_p () || CP_TYPE_CONST_P (type))
3285 && fcl_context == fcl_c99
3286 && TREE_CODE (type) == ARRAY_TYPE
3287 && !TYPE_HAS_NONTRIVIAL_DESTRUCTOR (type)
3288 && initializer_constant_valid_p (compound_literal, type))
3289 {
3290 tree decl = create_temporary_var (type);
3291 DECL_CONTEXT (decl) = NULL_TREE;
3292 DECL_INITIAL (decl) = compound_literal;
3293 TREE_STATIC (decl) = 1;
3294 if (literal_type_p (type) && CP_TYPE_CONST_NON_VOLATILE_P (type))
3295 {
3296 /* 5.19 says that a constant expression can include an
3297 lvalue-rvalue conversion applied to "a glvalue of literal type
3298 that refers to a non-volatile temporary object initialized
3299 with a constant expression". Rather than try to communicate
3300 that this VAR_DECL is a temporary, just mark it constexpr. */
3301 DECL_DECLARED_CONSTEXPR_P (decl) = true;
3302 DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (decl) = true;
3303 TREE_CONSTANT (decl) = true;
3304 }
3305 cp_apply_type_quals_to_decl (cp_type_quals (type), decl);
3306 decl = pushdecl_top_level (decl);
3307 DECL_NAME (decl) = make_anon_name ();
3308 SET_DECL_ASSEMBLER_NAME (decl, DECL_NAME (decl));
3309 /* Make sure the destructor is callable. */
3310 tree clean = cxx_maybe_build_cleanup (decl, complain);
3311 if (clean == error_mark_node)
3312 return error_mark_node;
3313 return decl;
3314 }
3315
3316 /* Represent other compound literals with TARGET_EXPR so we produce
3317 a prvalue, and can elide copies. */
3318 if (!VECTOR_TYPE_P (type)
3319 && (TREE_CODE (compound_literal) == CONSTRUCTOR
3320 || TREE_CODE (compound_literal) == VEC_INIT_EXPR))
3321 {
3322 /* The CONSTRUCTOR is now an initializer, not a compound literal. */
3323 if (TREE_CODE (compound_literal) == CONSTRUCTOR)
3324 TREE_HAS_CONSTRUCTOR (compound_literal) = false;
3325 compound_literal = get_target_expr_sfinae (compound_literal, complain);
3326 }
3327 else
3328 /* For e.g. int{42} just make sure it's a prvalue. */
3329 compound_literal = rvalue (compound_literal);
3330
3331 return compound_literal;
3332 }
3333
3334 /* Return the declaration for the function-name variable indicated by
3335 ID. */
3336
3337 tree
3338 finish_fname (tree id)
3339 {
3340 tree decl;
3341
3342 decl = fname_decl (input_location, C_RID_CODE (id), id);
3343 if (processing_template_decl && current_function_decl
3344 && decl != error_mark_node)
3345 decl = DECL_NAME (decl);
3346 return decl;
3347 }
3348
3349 /* Finish a translation unit. */
3350
3351 void
3352 finish_translation_unit (void)
3353 {
3354 /* In case there were missing closing braces,
3355 get us back to the global binding level. */
3356 pop_everything ();
3357 while (current_namespace != global_namespace)
3358 pop_namespace ();
3359
3360 /* Do file scope __FUNCTION__ et al. */
3361 finish_fname_decls ();
3362
3363 if (vec_safe_length (scope_chain->omp_declare_target_attribute))
3364 {
3365 if (!errorcount)
3366 error ("%<#pragma omp declare target%> without corresponding "
3367 "%<#pragma omp end declare target%>");
3368 vec_safe_truncate (scope_chain->omp_declare_target_attribute, 0);
3369 }
3370 }
3371
3372 /* Finish a template type parameter, specified as AGGR IDENTIFIER.
3373 Returns the parameter. */
3374
3375 tree
3376 finish_template_type_parm (tree aggr, tree identifier)
3377 {
3378 if (aggr != class_type_node)
3379 {
3380 permerror (input_location, "template type parameters must use the keyword %<class%> or %<typename%>");
3381 aggr = class_type_node;
3382 }
3383
3384 return build_tree_list (aggr, identifier);
3385 }
3386
3387 /* Finish a template template parameter, specified as AGGR IDENTIFIER.
3388 Returns the parameter. */
3389
3390 tree
3391 finish_template_template_parm (tree aggr, tree identifier)
3392 {
3393 tree decl = build_decl (input_location,
3394 TYPE_DECL, identifier, NULL_TREE);
3395
3396 tree tmpl = build_lang_decl (TEMPLATE_DECL, identifier, NULL_TREE);
3397 DECL_TEMPLATE_PARMS (tmpl) = current_template_parms;
3398 DECL_TEMPLATE_RESULT (tmpl) = decl;
3399 DECL_ARTIFICIAL (decl) = 1;
3400
3401 /* Associate the constraints with the underlying declaration,
3402 not the template. */
3403 tree reqs = TEMPLATE_PARMS_CONSTRAINTS (current_template_parms);
3404 tree constr = build_constraints (reqs, NULL_TREE);
3405 set_constraints (decl, constr);
3406
3407 end_template_decl ();
3408
3409 gcc_assert (DECL_TEMPLATE_PARMS (tmpl));
3410
3411 check_default_tmpl_args (decl, DECL_TEMPLATE_PARMS (tmpl),
3412 /*is_primary=*/true, /*is_partial=*/false,
3413 /*is_friend=*/0);
3414
3415 return finish_template_type_parm (aggr, tmpl);
3416 }
3417
3418 /* ARGUMENT is the default-argument value for a template template
3419 parameter. If ARGUMENT is invalid, issue error messages and return
3420 the ERROR_MARK_NODE. Otherwise, ARGUMENT itself is returned. */
3421
3422 tree
3423 check_template_template_default_arg (tree argument)
3424 {
3425 if (TREE_CODE (argument) != TEMPLATE_DECL
3426 && TREE_CODE (argument) != TEMPLATE_TEMPLATE_PARM
3427 && TREE_CODE (argument) != UNBOUND_CLASS_TEMPLATE)
3428 {
3429 if (TREE_CODE (argument) == TYPE_DECL)
3430 error ("invalid use of type %qT as a default value for a template "
3431 "template-parameter", TREE_TYPE (argument));
3432 else
3433 error ("invalid default argument for a template template parameter");
3434 return error_mark_node;
3435 }
3436
3437 return argument;
3438 }
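/* Illustrative examples (not from the source): given

     template <typename> struct Box { };

     template <template <typename> class TT = Box> struct A { };   // OK
     template <template <typename> class TT = int> struct B { };   // error

   the default argument Box is a TEMPLATE_DECL and is returned unchanged,
   while int is neither a template nor an unbound class template, so the
   second declaration is diagnosed here.  */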
3439
3440 /* Begin a class definition, as indicated by T. */
3441
3442 tree
3443 begin_class_definition (tree t)
3444 {
3445 if (error_operand_p (t) || error_operand_p (TYPE_MAIN_DECL (t)))
3446 return error_mark_node;
3447
3448 if (processing_template_parmlist && !LAMBDA_TYPE_P (t))
3449 {
3450 error ("definition of %q#T inside template parameter list", t);
3451 return error_mark_node;
3452 }
3453
3454 /* According to the C++ ABI, decimal classes defined in ISO/IEC TR 24733
3455 are passed the same as decimal scalar types. */
3456 if (TREE_CODE (t) == RECORD_TYPE
3457 && !processing_template_decl)
3458 {
3459 tree ns = TYPE_CONTEXT (t);
3460 if (ns && TREE_CODE (ns) == NAMESPACE_DECL
3461 && DECL_CONTEXT (ns) == std_node
3462 && DECL_NAME (ns)
3463 && id_equal (DECL_NAME (ns), "decimal"))
3464 {
3465 const char *n = TYPE_NAME_STRING (t);
3466 if ((strcmp (n, "decimal32") == 0)
3467 || (strcmp (n, "decimal64") == 0)
3468 || (strcmp (n, "decimal128") == 0))
3469 TYPE_TRANSPARENT_AGGR (t) = 1;
3470 }
3471 }
3472
3473 /* A non-implicit typename comes from code like:
3474
3475 template <typename T> struct A {
3476 template <typename U> struct A<T>::B ...
3477
3478 This is erroneous. */
3479 else if (TREE_CODE (t) == TYPENAME_TYPE)
3480 {
3481 error ("invalid definition of qualified type %qT", t);
3482 t = error_mark_node;
3483 }
3484
3485 if (t == error_mark_node || ! MAYBE_CLASS_TYPE_P (t))
3486 {
3487 t = make_class_type (RECORD_TYPE);
3488 pushtag (make_anon_name (), t);
3489 }
3490
3491 if (TYPE_BEING_DEFINED (t))
3492 {
3493 t = make_class_type (TREE_CODE (t));
3494 pushtag (TYPE_IDENTIFIER (t), t);
3495 }
3496
3497 if (modules_p ())
3498 {
3499 if (!module_may_redeclare (TYPE_NAME (t)))
3500 {
3501 error ("cannot declare %qD in a different module", TYPE_NAME (t));
3502 inform (DECL_SOURCE_LOCATION (TYPE_NAME (t)), "declared here");
3503 return error_mark_node;
3504 }
3505 set_instantiating_module (TYPE_NAME (t));
3506 set_defining_module (TYPE_NAME (t));
3507 }
3508
3509 maybe_process_partial_specialization (t);
3510 pushclass (t);
3511 TYPE_BEING_DEFINED (t) = 1;
3512 class_binding_level->defining_class_p = 1;
3513
3514 if (flag_pack_struct)
3515 {
3516 tree v;
3517 TYPE_PACKED (t) = 1;
3518 /* Even though the type is being defined for the first time
3519 here, there might have been a forward declaration, so there
3520 might be cv-qualified variants of T. */
3521 for (v = TYPE_NEXT_VARIANT (t); v; v = TYPE_NEXT_VARIANT (v))
3522 TYPE_PACKED (v) = 1;
3523 }
3524 /* Reset the interface data, at the earliest possible
3525 moment, as it might have been set via a class foo;
3526 before. */
3527 if (! TYPE_UNNAMED_P (t))
3528 {
3529 struct c_fileinfo *finfo = \
3530 get_fileinfo (LOCATION_FILE (input_location));
3531 CLASSTYPE_INTERFACE_ONLY (t) = finfo->interface_only;
3532 SET_CLASSTYPE_INTERFACE_UNKNOWN_X
3533 (t, finfo->interface_unknown);
3534 }
3535 reset_specialization ();
3536
3537 /* Make a declaration for this class in its own scope. */
3538 build_self_reference ();
3539
3540 return t;
3541 }
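/* A hedged sketch of the decimal special case above (assuming an
   ISO/IEC TR 24733 style library):

     namespace std { namespace decimal {
       class decimal32 { };   // gets TYPE_TRANSPARENT_AGGR set here
     } }

   so that, per the C++ ABI, objects of these classes are passed exactly
   like the corresponding decimal scalar types.  */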
3542
3543 /* Finish the member declaration given by DECL. */
3544
3545 void
3546 finish_member_declaration (tree decl)
3547 {
3548 if (decl == error_mark_node || decl == NULL_TREE)
3549 return;
3550
3551 if (decl == void_type_node)
3552 /* The COMPONENT was a friend, not a member, and so there's
3553 nothing for us to do. */
3554 return;
3555
3556 /* We should see only one DECL at a time. */
3557 gcc_assert (DECL_CHAIN (decl) == NULL_TREE);
3558
3559 /* Don't add decls after definition. */
3560 gcc_assert (TYPE_BEING_DEFINED (current_class_type)
3561 /* We can add lambda types when late parsing default
3562 arguments. */
3563 || LAMBDA_TYPE_P (TREE_TYPE (decl)));
3564
3565 /* Set up access control for DECL. */
3566 TREE_PRIVATE (decl)
3567 = (current_access_specifier == access_private_node);
3568 TREE_PROTECTED (decl)
3569 = (current_access_specifier == access_protected_node);
3570 if (TREE_CODE (decl) == TEMPLATE_DECL)
3571 {
3572 TREE_PRIVATE (DECL_TEMPLATE_RESULT (decl)) = TREE_PRIVATE (decl);
3573 TREE_PROTECTED (DECL_TEMPLATE_RESULT (decl)) = TREE_PROTECTED (decl);
3574 }
3575
3576 /* Mark the DECL as a member of the current class, unless it's
3577 a member of an enumeration. */
3578 if (TREE_CODE (decl) != CONST_DECL)
3579 DECL_CONTEXT (decl) = current_class_type;
3580
3581 /* Remember the single FIELD_DECL an anonymous aggregate type is used for. */
3582 if (TREE_CODE (decl) == FIELD_DECL
3583 && ANON_AGGR_TYPE_P (TREE_TYPE (decl)))
3584 {
3585 gcc_assert (!ANON_AGGR_TYPE_FIELD (TYPE_MAIN_VARIANT (TREE_TYPE (decl))));
3586 ANON_AGGR_TYPE_FIELD (TYPE_MAIN_VARIANT (TREE_TYPE (decl))) = decl;
3587 }
3588
3589 if (TREE_CODE (decl) == USING_DECL)
3590 /* Avoid debug info for class-scope USING_DECLS for now, we'll
3591 call cp_emit_debug_info_for_using later. */
3592 DECL_IGNORED_P (decl) = 1;
3593
3594 /* Check for bare parameter packs in the non-static data member
3595 declaration. */
3596 if (TREE_CODE (decl) == FIELD_DECL)
3597 {
3598 if (check_for_bare_parameter_packs (TREE_TYPE (decl)))
3599 TREE_TYPE (decl) = error_mark_node;
3600 if (check_for_bare_parameter_packs (DECL_ATTRIBUTES (decl)))
3601 DECL_ATTRIBUTES (decl) = NULL_TREE;
3602 }
3603
3604 /* [dcl.link]
3605
3606 A C language linkage is ignored for the names of class members
3607 and the member function type of class member functions. */
3608 if (DECL_LANG_SPECIFIC (decl))
3609 SET_DECL_LANGUAGE (decl, lang_cplusplus);
3610
3611 bool add = false;
3612
3613 /* Functions and non-functions are added differently. */
3614 if (DECL_DECLARES_FUNCTION_P (decl))
3615 add = add_method (current_class_type, decl, false);
3616 /* Enter the DECL into the scope of the class, if the class
3617 isn't a closure (whose fields are supposed to be unnamed). */
3618 else if (CLASSTYPE_LAMBDA_EXPR (current_class_type)
3619 || maybe_push_used_methods (decl)
3620 || pushdecl_class_level (decl))
3621 add = true;
3622
3623 if (add)
3624 {
3625 /* All TYPE_DECLs go at the end of TYPE_FIELDS. Ordinary fields
3626 go at the beginning. The reason is that
3627 legacy_nonfn_member_lookup searches the list in order, and we
3628 want a field name to override a type name so that the "struct
3629 stat hack" will work. In particular:
3630
3631 struct S { enum E { }; static const int E = 5; int ary[S::E]; } s;
3632
3633 is valid. */
3634
3635 if (TREE_CODE (decl) == TYPE_DECL)
3636 TYPE_FIELDS (current_class_type)
3637 = chainon (TYPE_FIELDS (current_class_type), decl);
3638 else
3639 {
3640 DECL_CHAIN (decl) = TYPE_FIELDS (current_class_type);
3641 TYPE_FIELDS (current_class_type) = decl;
3642 }
3643
3644 maybe_add_class_template_decl_list (current_class_type, decl,
3645 /*friend_p=*/0);
3646 }
3647 }
3648
3649 /* Finish processing a complete template declaration. The PARMS are
3650 the template parameters. */
3651
3652 void
3653 finish_template_decl (tree parms)
3654 {
3655 if (parms)
3656 end_template_decl ();
3657 else
3658 end_specialization ();
3659 }
3660
3661 // Returns the template type of the class scope being entered. If we're
3662 // entering a constrained class scope, TYPE is the class template
3663 // scope being entered and we may need to match the intended type with
3664 // a constrained specialization. For example:
3665 //
3666 // template<Object T>
3667 // struct S { void f(); }; #1
3668 //
3669 // template<Object T>
3670 // void S<T>::f() { } #2
3671 //
3672 // We check, in #2, that S<T> refers precisely to the type declared by
3673 // #1 (i.e., that the constraints match). Note that the following should
3674 // be an error since there is no specialization of S<T> that is
3675 // unconstrained, but this is not diagnosed here.
3676 //
3677 // template<typename T>
3678 // void S<T>::f() { }
3679 //
3680 // We cannot diagnose this problem here since this function also matches
3681 // qualified template names that are not part of a definition. For example:
3682 //
3683 // template<Integral T, Floating_point U>
3684 // typename pair<T, U>::first_type void f(T, U);
3685 //
3686 // Here, it is unlikely that there is a partial specialization of
3687 // pair constrained for Integral and Floating_point arguments.
3688 //
3689 // The general rule is: if a constrained specialization with matching
3690 // constraints is found, return that type. Also note that if TYPE is not a
3691 // class-type (e.g. a typename type), then no fixup is needed.
3692
3693 static tree
3694 fixup_template_type (tree type)
3695 {
3696 // Find the template parameter list at the depth appropriate to
3697 // the scope we're trying to enter.
3698 tree parms = current_template_parms;
3699 int depth = template_class_depth (type);
3700 for (int n = current_template_depth; n > depth && parms; --n)
3701 parms = TREE_CHAIN (parms);
3702 if (!parms)
3703 return type;
3704 tree cur_reqs = TEMPLATE_PARMS_CONSTRAINTS (parms);
3705 tree cur_constr = build_constraints (cur_reqs, NULL_TREE);
3706
3707 // Search for a specialization whose type and constraints match.
3708 tree tmpl = CLASSTYPE_TI_TEMPLATE (type);
3709 tree specs = DECL_TEMPLATE_SPECIALIZATIONS (tmpl);
3710 while (specs)
3711 {
3712 tree spec_constr = get_constraints (TREE_VALUE (specs));
3713
3714 // If the type and constraints match a specialization, then we
3715 // are entering that type.
3716 if (same_type_p (type, TREE_TYPE (specs))
3717 && equivalent_constraints (cur_constr, spec_constr))
3718 return TREE_TYPE (specs);
3719 specs = TREE_CHAIN (specs);
3720 }
3721
3722 // If no specialization matches, then we must return the type
3723 // previously found.
3724 return type;
3725 }
3726
3727 /* Finish processing a template-id (which names a type) of the form
3728 NAME < ARGS >. Return the TYPE_DECL for the type named by the
3729 template-id. If ENTERING_SCOPE is nonzero we are about to enter
3730 the scope of the template-id indicated. */
3731
3732 tree
3733 finish_template_type (tree name, tree args, int entering_scope)
3734 {
3735 tree type;
3736
3737 type = lookup_template_class (name, args,
3738 NULL_TREE, NULL_TREE, entering_scope,
3739 tf_warning_or_error | tf_user);
3740
3741 /* If we might be entering the scope of a partial specialization,
3742 find the one with the right constraints. */
3743 if (flag_concepts
3744 && entering_scope
3745 && CLASS_TYPE_P (type)
3746 && CLASSTYPE_TEMPLATE_INFO (type)
3747 && dependent_type_p (type)
3748 && PRIMARY_TEMPLATE_P (CLASSTYPE_TI_TEMPLATE (type)))
3749 type = fixup_template_type (type);
3750
3751 if (type == error_mark_node)
3752 return type;
3753 else if (CLASS_TYPE_P (type) && !alias_type_or_template_p (type))
3754 return TYPE_STUB_DECL (type);
3755 else
3756 return TYPE_NAME (type);
3757 }
3758
3759 /* Finish processing a BASE_CLASS with the indicated ACCESS_SPECIFIER.
3760 Return a TREE_LIST containing the ACCESS_SPECIFIER and the
3761 BASE_CLASS, or NULL_TREE if an error occurred. The
3762 ACCESS_SPECIFIER is one of
3763 access_{default,public,protected,private}_node. For a virtual base
3764 we set TREE_TYPE. */
3765
3766 tree
3767 finish_base_specifier (tree base, tree access, bool virtual_p)
3768 {
3769 tree result;
3770
3771 if (base == error_mark_node)
3772 {
3773 error ("invalid base-class specification");
3774 result = NULL_TREE;
3775 }
3776 else if (! MAYBE_CLASS_TYPE_P (base))
3777 {
3778 error ("%qT is not a class type", base);
3779 result = NULL_TREE;
3780 }
3781 else
3782 {
3783 if (cp_type_quals (base) != 0)
3784 {
3785 /* DR 484: Can a base-specifier name a cv-qualified
3786 class type? */
3787 base = TYPE_MAIN_VARIANT (base);
3788 }
3789 result = build_tree_list (access, base);
3790 if (virtual_p)
3791 TREE_TYPE (result) = integer_type_node;
3792 }
3793
3794 return result;
3795 }
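/* Hedged examples of the cases above:

     struct B { };
     typedef const B CB;

     struct D1 : public virtual B { };   // TREE_TYPE of the result is set
     struct D2 : CB { };                 // DR 484: cv-quals are stripped

   In both cases the returned TREE_LIST pairs the access node with the
   (main variant of the) base type.  */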
3796
3797 /* If FNS is a member function, a set of member functions, or a
3798 template-id referring to one or more member functions, return a
3799 BASELINK for FNS, incorporating the current access context.
3800 Otherwise, return FNS unchanged. */
3801
3802 tree
3803 baselink_for_fns (tree fns)
3804 {
3805 tree scope;
3806 tree cl;
3807
3808 if (BASELINK_P (fns)
3809 || error_operand_p (fns))
3810 return fns;
3811
3812 scope = ovl_scope (fns);
3813 if (!CLASS_TYPE_P (scope))
3814 return fns;
3815
3816 cl = currently_open_derived_class (scope);
3817 if (!cl)
3818 cl = scope;
3819 tree access_path = TYPE_BINFO (cl);
3820 tree conv_path = (cl == scope ? access_path
3821 : lookup_base (cl, scope, ba_any, NULL, tf_none));
3822 return build_baselink (conv_path, access_path, fns, /*optype=*/NULL_TREE);
3823 }
3824
3825 /* Returns true iff DECL is a variable from a function outside
3826 the current one. */
3827
3828 static bool
3829 outer_var_p (tree decl)
3830 {
3831 return ((VAR_P (decl) || TREE_CODE (decl) == PARM_DECL)
3832 && DECL_FUNCTION_SCOPE_P (decl)
3833 /* Don't get confused by temporaries. */
3834 && DECL_NAME (decl)
3835 && (DECL_CONTEXT (decl) != current_function_decl
3836 || parsing_nsdmi ()));
3837 }
3838
3839 /* As above, but also checks that DECL is automatic. */
3840
3841 bool
3842 outer_automatic_var_p (tree decl)
3843 {
3844 return (outer_var_p (decl)
3845 && !TREE_STATIC (decl));
3846 }
3847
3848 /* DECL satisfies outer_automatic_var_p. Possibly complain about it or
3849 rewrite it for lambda capture.
3850
3851 If ODR_USE is true, we're being called from mark_use, and we complain about
3852 use of constant variables. If ODR_USE is false, we're being called for the
3853 id-expression, and we do lambda capture. */
3854
3855 tree
3856 process_outer_var_ref (tree decl, tsubst_flags_t complain, bool odr_use)
3857 {
3858 if (cp_unevaluated_operand)
3859 {
3860 tree type = TREE_TYPE (decl);
3861 if (!dependent_type_p (type)
3862 && variably_modified_type_p (type, NULL_TREE))
3863 /* VLAs are used even in unevaluated context. */;
3864 else
3865 /* It's not a use (3.2) if we're in an unevaluated context. */
3866 return decl;
3867 }
3868 if (decl == error_mark_node)
3869 return decl;
3870
3871 tree context = DECL_CONTEXT (decl);
3872 tree containing_function = current_function_decl;
3873 tree lambda_stack = NULL_TREE;
3874 tree lambda_expr = NULL_TREE;
3875 tree initializer = convert_from_reference (decl);
3876
3877 /* Mark it as used now even if the use is ill-formed. */
3878 if (!mark_used (decl, complain))
3879 return error_mark_node;
3880
3881 if (parsing_nsdmi ())
3882 containing_function = NULL_TREE;
3883
3884 if (containing_function && LAMBDA_FUNCTION_P (containing_function))
3885 {
3886 /* Check whether we've already built a proxy. */
3887 tree var = decl;
3888 while (is_normal_capture_proxy (var))
3889 var = DECL_CAPTURED_VARIABLE (var);
3890 tree d = retrieve_local_specialization (var);
3891
3892 if (d && d != decl && is_capture_proxy (d))
3893 {
3894 if (DECL_CONTEXT (d) == containing_function)
3895 /* We already have an inner proxy. */
3896 return d;
3897 else
3898 /* We need to capture an outer proxy. */
3899 return process_outer_var_ref (d, complain, odr_use);
3900 }
3901 }
3902
3903 /* If we are in a lambda function, we can move out until we hit
3904 1. the context,
3905 2. a non-lambda function, or
3906 3. a non-default capturing lambda function. */
3907 while (context != containing_function
3908 /* containing_function can be null with invalid generic lambdas. */
3909 && containing_function
3910 && LAMBDA_FUNCTION_P (containing_function))
3911 {
3912 tree closure = DECL_CONTEXT (containing_function);
3913 lambda_expr = CLASSTYPE_LAMBDA_EXPR (closure);
3914
3915 if (TYPE_CLASS_SCOPE_P (closure))
3916 /* A lambda in an NSDMI (c++/64496). */
3917 break;
3918
3919 if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda_expr) == CPLD_NONE)
3920 break;
3921
3922 lambda_stack = tree_cons (NULL_TREE, lambda_expr, lambda_stack);
3923
3924 containing_function = decl_function_context (containing_function);
3925 }
3926
3927 /* In a lambda within a template, wait until instantiation time to implicitly
3928 capture a parameter pack. We want to wait because we don't know if we're
3929 capturing the whole pack or a single element, and it's OK to wait because
3930 find_parameter_packs_r walks into the lambda body. */
3931 if (context == containing_function
3932 && DECL_PACK_P (decl))
3933 return decl;
3934
3935 if (lambda_expr && VAR_P (decl) && DECL_ANON_UNION_VAR_P (decl))
3936 {
3937 if (complain & tf_error)
3938 error ("cannot capture member %qD of anonymous union", decl);
3939 return error_mark_node;
3940 }
3941 /* Do lambda capture when processing the id-expression, not when
3942 odr-using a variable. */
3943 if (!odr_use && context == containing_function)
3944 decl = add_default_capture (lambda_stack,
3945 /*id=*/DECL_NAME (decl), initializer);
3946 /* Only an odr-use of an outer automatic variable causes an
3947 error, and a constant variable can decay to a prvalue
3948 constant without odr-use. So don't complain yet. */
3949 else if (!odr_use && decl_constant_var_p (decl))
3950 return decl;
3951 else if (lambda_expr)
3952 {
3953 if (complain & tf_error)
3954 {
3955 error ("%qD is not captured", decl);
3956 tree closure = LAMBDA_EXPR_CLOSURE (lambda_expr);
3957 if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda_expr) == CPLD_NONE)
3958 inform (location_of (closure),
3959 "the lambda has no capture-default");
3960 else if (TYPE_CLASS_SCOPE_P (closure))
3961 inform (UNKNOWN_LOCATION, "lambda in local class %q+T cannot "
3962 "capture variables from the enclosing context",
3963 TYPE_CONTEXT (closure));
3964 inform (DECL_SOURCE_LOCATION (decl), "%q#D declared here", decl);
3965 }
3966 return error_mark_node;
3967 }
3968 else
3969 {
3970 if (complain & tf_error)
3971 {
3972 error (VAR_P (decl)
3973 ? G_("use of local variable with automatic storage from "
3974 "containing function")
3975 : G_("use of parameter from containing function"));
3976 inform (DECL_SOURCE_LOCATION (decl), "%q#D declared here", decl);
3977 }
3978 return error_mark_node;
3979 }
3980 return decl;
3981 }
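/* Illustrative captures (not taken from the testsuite):

     void f ()
     {
       int i = 0;
       auto ok  = [=] { return i; };   // capture-default: i is captured here
       auto bad = []  { return i; };   // error: 'i' is not captured
     }

   The first lambda reaches add_default_capture via the loop above; the
   second has CPLD_NONE, so we emit the "is not captured" diagnostic and
   point out the missing capture-default.  */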
3982
3983 /* ID_EXPRESSION is a representation of parsed, but unprocessed,
3984 id-expression. (See cp_parser_id_expression for details.) SCOPE,
3985 if non-NULL, is the type or namespace used to explicitly qualify
3986 ID_EXPRESSION. DECL is the entity to which that name has been
3987 resolved.
3988
3989 *CONSTANT_EXPRESSION_P is true if we are presently parsing a
3990 constant-expression. In that case, *NON_CONSTANT_EXPRESSION_P will
3991 be set to true if this expression isn't permitted in a
3992 constant-expression, but it is otherwise not set by this function.
3993 *ALLOW_NON_CONSTANT_EXPRESSION_P is true if we are parsing a
3994 constant-expression, but a non-constant expression is also
3995 permissible.
3996
3997 DONE is true if this expression is a complete postfix-expression;
3998 it is false if this expression is followed by '->', '[', '(', etc.
3999 ADDRESS_P is true iff this expression is the operand of '&'.
4000 TEMPLATE_P is true iff the qualified-id was of the form
4001 "A::template B". TEMPLATE_ARG_P is true iff this qualified name
4002 appears as a template argument.
4003
4004 If an error occurs, and it is the kind of error that might cause
4005 the parser to abort a tentative parse, *ERROR_MSG is filled in. It
4006 is the caller's responsibility to issue the message. *ERROR_MSG
4007 will be a string with static storage duration, so the caller need
4008 not "free" it.
4009
4010 Return an expression for the entity, after issuing appropriate
4011 diagnostics. This function is also responsible for transforming a
4012 reference to a non-static member into a COMPONENT_REF that makes
4013 the use of "this" explicit.
4014
4015 Upon return, *IDK will be filled in appropriately. */
4016 static cp_expr
4017 finish_id_expression_1 (tree id_expression,
4018 tree decl,
4019 tree scope,
4020 cp_id_kind *idk,
4021 bool integral_constant_expression_p,
4022 bool allow_non_integral_constant_expression_p,
4023 bool *non_integral_constant_expression_p,
4024 bool template_p,
4025 bool done,
4026 bool address_p,
4027 bool template_arg_p,
4028 const char **error_msg,
4029 location_t location)
4030 {
4031 decl = strip_using_decl (decl);
4032
4033 /* Initialize the output parameters. */
4034 *idk = CP_ID_KIND_NONE;
4035 *error_msg = NULL;
4036
4037 if (id_expression == error_mark_node)
4038 return error_mark_node;
4039 /* If we have a template-id, then no further lookup is
4040 required. If the template-id was for a template-class, we
4041 will sometimes have a TYPE_DECL at this point. */
4042 else if (TREE_CODE (decl) == TEMPLATE_ID_EXPR
4043 || TREE_CODE (decl) == TYPE_DECL)
4044 ;
4045 /* Look up the name. */
4046 else
4047 {
4048 if (decl == error_mark_node)
4049 {
4050 /* Name lookup failed. */
4051 if (scope
4052 && (!TYPE_P (scope)
4053 || (!dependent_type_p (scope)
4054 && !(identifier_p (id_expression)
4055 && IDENTIFIER_CONV_OP_P (id_expression)
4056 && dependent_type_p (TREE_TYPE (id_expression))))))
4057 {
4058 /* If the qualifying type is non-dependent (and the name
4059 does not name a conversion operator to a dependent
4060 type), issue an error. */
4061 qualified_name_lookup_error (scope, id_expression, decl, location);
4062 return error_mark_node;
4063 }
4064 else if (!scope)
4065 {
4066 /* It may be resolved via Koenig lookup. */
4067 *idk = CP_ID_KIND_UNQUALIFIED;
4068 return id_expression;
4069 }
4070 else
4071 decl = id_expression;
4072 }
4073
4074 /* Remember that the name was used in the definition of
4075 the current class so that we can check later to see if
4076 the meaning would have been different after the class
4077 was entirely defined. */
4078 if (!scope && decl != error_mark_node && identifier_p (id_expression))
4079 maybe_note_name_used_in_class (id_expression, decl);
4080
4081 /* A use in unevaluated operand might not be instantiated appropriately
4082 if tsubst_copy builds a dummy parm, or if we never instantiate a
4083 generic lambda, so mark it now. */
4084 if (processing_template_decl && cp_unevaluated_operand)
4085 mark_type_use (decl);
4086
4087 /* Disallow uses of local variables from containing functions, except
4088 within lambda-expressions. */
4089 if (outer_automatic_var_p (decl))
4090 {
4091 decl = process_outer_var_ref (decl, tf_warning_or_error);
4092 if (decl == error_mark_node)
4093 return error_mark_node;
4094 }
4095
4096 /* Also disallow uses of function parameters outside the function
4097 body, except inside an unevaluated context (i.e. decltype). */
4098 if (TREE_CODE (decl) == PARM_DECL
4099 && DECL_CONTEXT (decl) == NULL_TREE
4100 && !cp_unevaluated_operand)
4101 {
4102 *error_msg = G_("use of parameter outside function body");
4103 return error_mark_node;
4104 }
4105 }
4106
4107 /* If we didn't find anything, or what we found was a type,
4108 then this wasn't really an id-expression. */
4109 if (TREE_CODE (decl) == TEMPLATE_DECL
4110 && !DECL_FUNCTION_TEMPLATE_P (decl))
4111 {
4112 *error_msg = G_("missing template arguments");
4113 return error_mark_node;
4114 }
4115 else if (TREE_CODE (decl) == TYPE_DECL
4116 || TREE_CODE (decl) == NAMESPACE_DECL)
4117 {
4118 *error_msg = G_("expected primary-expression");
4119 return error_mark_node;
4120 }
4121
4122 /* If the name resolved to a template parameter, there is no
4123 need to look it up again later. */
4124 if ((TREE_CODE (decl) == CONST_DECL && DECL_TEMPLATE_PARM_P (decl))
4125 || TREE_CODE (decl) == TEMPLATE_PARM_INDEX)
4126 {
4127 tree r;
4128
4129 *idk = CP_ID_KIND_NONE;
4130 if (TREE_CODE (decl) == TEMPLATE_PARM_INDEX)
4131 decl = TEMPLATE_PARM_DECL (decl);
4132 r = DECL_INITIAL (decl);
4133 if (CLASS_TYPE_P (TREE_TYPE (r)) && !CP_TYPE_CONST_P (TREE_TYPE (r)))
4134 {
4135 /* If the entity is a template parameter object for a template
4136 parameter of type T, the type of the expression is const T. */
4137 tree ctype = TREE_TYPE (r);
4138 ctype = cp_build_qualified_type (ctype, (cp_type_quals (ctype)
4139 | TYPE_QUAL_CONST));
4140 r = build1 (VIEW_CONVERT_EXPR, ctype, r);
4141 }
4142 r = convert_from_reference (r);
4143 if (integral_constant_expression_p
4144 && !dependent_type_p (TREE_TYPE (decl))
4145 && !(INTEGRAL_OR_ENUMERATION_TYPE_P (TREE_TYPE (r))))
4146 {
4147 if (!allow_non_integral_constant_expression_p)
4148 error ("template parameter %qD of type %qT is not allowed in "
4149 "an integral constant expression because it is not of "
4150 "integral or enumeration type", decl, TREE_TYPE (decl));
4151 *non_integral_constant_expression_p = true;
4152 }
4153 return r;
4154 }
4155 else
4156 {
4157 bool dependent_p = type_dependent_expression_p (decl);
4158
4159 /* If the declaration was explicitly qualified indicate
4160 that. The semantics of `A::f(3)' are different than
4161 `f(3)' if `f' is virtual. */
4162 *idk = (scope
4163 ? CP_ID_KIND_QUALIFIED
4164 : (TREE_CODE (decl) == TEMPLATE_ID_EXPR
4165 ? CP_ID_KIND_TEMPLATE_ID
4166 : (dependent_p
4167 ? CP_ID_KIND_UNQUALIFIED_DEPENDENT
4168 : CP_ID_KIND_UNQUALIFIED)));
4169
4170 if (dependent_p
4171 && !scope
4172 && DECL_P (decl)
4173 && any_dependent_type_attributes_p (DECL_ATTRIBUTES (decl)))
4174 /* Dependent type attributes on the decl mean that the TREE_TYPE is
4175 wrong, so just return the identifier. */
4176 return id_expression;
4177
4178 if (DECL_CLASS_TEMPLATE_P (decl))
4179 {
4180 error ("use of class template %qT as expression", decl);
4181 return error_mark_node;
4182 }
4183
4184 if (TREE_CODE (decl) == TREE_LIST)
4185 {
4186 /* Ambiguous reference to base members. */
4187 error ("request for member %qD is ambiguous in "
4188 "multiple inheritance lattice", id_expression);
4189 print_candidates (decl);
4190 return error_mark_node;
4191 }
4192
4193 /* Mark variable-like entities as used. Functions are similarly
4194 marked either below or after overload resolution. */
4195 if ((VAR_P (decl)
4196 || TREE_CODE (decl) == PARM_DECL
4197 || TREE_CODE (decl) == CONST_DECL
4198 || TREE_CODE (decl) == RESULT_DECL)
4199 && !mark_used (decl))
4200 return error_mark_node;
4201
4202 /* Only certain kinds of names are allowed in constant
4203 expression. Template parameters have already
4204 been handled above. */
4205 if (! error_operand_p (decl)
4206 && !dependent_p
4207 && integral_constant_expression_p
4208 && !decl_constant_var_p (decl)
4209 && TREE_CODE (decl) != CONST_DECL
4210 && !builtin_valid_in_constant_expr_p (decl)
4211 && !concept_check_p (decl))
4212 {
4213 if (!allow_non_integral_constant_expression_p)
4214 {
4215 error ("%qD cannot appear in a constant-expression", decl);
4216 return error_mark_node;
4217 }
4218 *non_integral_constant_expression_p = true;
4219 }
4220
4221 if (tree wrap = maybe_get_tls_wrapper_call (decl))
4222 /* Replace an evaluated use of the thread_local variable with
4223 a call to its wrapper. */
4224 decl = wrap;
4225 else if (TREE_CODE (decl) == TEMPLATE_ID_EXPR
4226 && !dependent_p
4227 && variable_template_p (TREE_OPERAND (decl, 0))
4228 && !concept_check_p (decl))
4229 {
4230 decl = finish_template_variable (decl);
4231 mark_used (decl);
4232 decl = convert_from_reference (decl);
4233 }
4234 else if (concept_check_p (decl))
4235 {
4236 /* Nothing more to do. All of the analysis for concept checks
4237 is done by build_concept_id, called from the parser. */
4238 }
4239 else if (scope)
4240 {
4241 if (TREE_CODE (decl) == SCOPE_REF)
4242 {
4243 gcc_assert (same_type_p (scope, TREE_OPERAND (decl, 0)));
4244 decl = TREE_OPERAND (decl, 1);
4245 }
4246
4247 decl = (adjust_result_of_qualified_name_lookup
4248 (decl, scope, current_nonlambda_class_type()));
4249
4250 cp_warn_deprecated_use_scopes (scope);
4251
4252 if (TYPE_P (scope))
4253 decl = finish_qualified_id_expr (scope,
4254 decl,
4255 done,
4256 address_p,
4257 template_p,
4258 template_arg_p,
4259 tf_warning_or_error);
4260 else
4261 decl = convert_from_reference (decl);
4262 }
4263 else if (TREE_CODE (decl) == FIELD_DECL)
4264 {
4265 /* Since SCOPE is NULL here, this is an unqualified name.
4266 Access checking has been performed during name lookup
4267 already. Turn off checking to avoid duplicate errors. */
4268 push_deferring_access_checks (dk_no_check);
4269 decl = finish_non_static_data_member (decl, NULL_TREE,
4270 /*qualifying_scope=*/NULL_TREE);
4271 pop_deferring_access_checks ();
4272 }
4273 else if (is_overloaded_fn (decl))
4274 {
4275 /* We only need to look at the first function,
4276 because all the fns share the attribute we're
4277 concerned with (all member fns or all non-members). */
4278 tree first_fn = get_first_fn (decl);
4279 first_fn = STRIP_TEMPLATE (first_fn);
4280
4281 if (!template_arg_p
4282 && (TREE_CODE (first_fn) == USING_DECL
4283 || (TREE_CODE (first_fn) == FUNCTION_DECL
4284 && DECL_FUNCTION_MEMBER_P (first_fn)
4285 && !shared_member_p (decl))))
4286 {
4287 /* A set of member functions. */
4288 decl = maybe_dummy_object (DECL_CONTEXT (first_fn), 0);
4289 return finish_class_member_access_expr (decl, id_expression,
4290 /*template_p=*/false,
4291 tf_warning_or_error);
4292 }
4293
4294 decl = baselink_for_fns (decl);
4295 }
4296 else
4297 {
4298 if (DECL_P (decl) && DECL_NONLOCAL (decl)
4299 && DECL_CLASS_SCOPE_P (decl))
4300 {
4301 tree context = context_for_name_lookup (decl);
4302 if (context != current_class_type)
4303 {
4304 tree path = currently_open_derived_class (context);
4305 if (!path)
4306 /* PATH can be null for using an enum of an unrelated
4307 class; we checked its access in lookup_using_decl.
4308
4309 ??? Should this case make a clone instead, like
4310 handle_using_decl? */
4311 gcc_assert (TREE_CODE (decl) == CONST_DECL);
4312 else
4313 perform_or_defer_access_check (TYPE_BINFO (path),
4314 decl, decl,
4315 tf_warning_or_error);
4316 }
4317 }
4318
4319 decl = convert_from_reference (decl);
4320 }
4321 }
4322
4323 return cp_expr (decl, location);
4324 }
4325
4326 /* As per finish_id_expression_1, but adding a wrapper node
4327 around the result if needed to express LOCATION. */
4328
4329 cp_expr
4330 finish_id_expression (tree id_expression,
4331 tree decl,
4332 tree scope,
4333 cp_id_kind *idk,
4334 bool integral_constant_expression_p,
4335 bool allow_non_integral_constant_expression_p,
4336 bool *non_integral_constant_expression_p,
4337 bool template_p,
4338 bool done,
4339 bool address_p,
4340 bool template_arg_p,
4341 const char **error_msg,
4342 location_t location)
4343 {
4344 cp_expr result
4345 = finish_id_expression_1 (id_expression, decl, scope, idk,
4346 integral_constant_expression_p,
4347 allow_non_integral_constant_expression_p,
4348 non_integral_constant_expression_p,
4349 template_p, done, address_p, template_arg_p,
4350 error_msg, location);
4351 return result.maybe_add_location_wrapper ();
4352 }
4353
4354 /* Implement the __typeof keyword: Return the type of EXPR, suitable for
4355 use as a type-specifier. */
4356
4357 tree
4358 finish_typeof (tree expr)
4359 {
4360 tree type;
4361
4362 if (type_dependent_expression_p (expr))
4363 {
4364 type = cxx_make_type (TYPEOF_TYPE);
4365 TYPEOF_TYPE_EXPR (type) = expr;
4366 SET_TYPE_STRUCTURAL_EQUALITY (type);
4367
4368 return type;
4369 }
4370
4371 expr = mark_type_use (expr);
4372
4373 type = unlowered_expr_type (expr);
4374
4375 if (!type || type == unknown_type_node)
4376 {
4377 error ("type of %qE is unknown", expr);
4378 return error_mark_node;
4379 }
4380
4381 return type;
4382 }
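/* A small illustration (assumed usage of the GNU extension):

     int i;
     __typeof (i + 1) j = 0;   // j has type int

   If the operand were type-dependent, e.g. inside a template, we would
   instead return a TYPEOF_TYPE to be resolved at instantiation time.  */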
4383
4384 /* Implement the __underlying_type keyword: Return the underlying
4385 type of TYPE, suitable for use as a type-specifier. */
4386
4387 tree
4388 finish_underlying_type (tree type)
4389 {
4390 tree underlying_type;
4391
4392 if (processing_template_decl)
4393 {
4394 underlying_type = cxx_make_type (UNDERLYING_TYPE);
4395 UNDERLYING_TYPE_TYPE (underlying_type) = type;
4396 SET_TYPE_STRUCTURAL_EQUALITY (underlying_type);
4397
4398 return underlying_type;
4399 }
4400
4401 if (!complete_type_or_else (type, NULL_TREE))
4402 return error_mark_node;
4403
4404 if (TREE_CODE (type) != ENUMERAL_TYPE)
4405 {
4406 error ("%qT is not an enumeration type", type);
4407 return error_mark_node;
4408 }
4409
4410 underlying_type = ENUM_UNDERLYING_TYPE (type);
4411
4412 /* Fixup necessary in this case because ENUM_UNDERLYING_TYPE
4413 includes TYPE_MIN_VALUE and TYPE_MAX_VALUE information.
4414 See finish_enum_value_list for details. */
4415 if (!ENUM_FIXED_UNDERLYING_TYPE_P (type))
4416 underlying_type
4417 = c_common_type_for_mode (TYPE_MODE (underlying_type),
4418 TYPE_UNSIGNED (underlying_type));
4419
4420 return underlying_type;
4421 }
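/* Hedged examples of the two interesting cases:

     enum E1 { a = 1u << 31 };       // __underlying_type (E1) is
                                     // unsigned int on common targets
     enum class E2 : short { b };    // fixed: __underlying_type (E2) is short

   For E1 the stored underlying type carries min/max information, so we
   rebuild a plain integer type of the same mode and signedness above.  */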
4422
4423 /* Implement the __direct_bases keyword: Return the direct base classes
4424 of type. */
4425
4426 tree
4427 calculate_direct_bases (tree type, tsubst_flags_t complain)
4428 {
4429 if (!complete_type_or_maybe_complain (type, NULL_TREE, complain)
4430 || !NON_UNION_CLASS_TYPE_P (type))
4431 return make_tree_vec (0);
4432
4433 releasing_vec vector;
4434 vec<tree, va_gc> *base_binfos = BINFO_BASE_BINFOS (TYPE_BINFO (type));
4435 tree binfo;
4436 unsigned i;
4437
4438 /* Virtual bases are initialized first */
4439 for (i = 0; base_binfos->iterate (i, &binfo); i++)
4440 if (BINFO_VIRTUAL_P (binfo))
4441 vec_safe_push (vector, binfo);
4442
4443 /* Now non-virtuals */
4444 for (i = 0; base_binfos->iterate (i, &binfo); i++)
4445 if (!BINFO_VIRTUAL_P (binfo))
4446 vec_safe_push (vector, binfo);
4447
4448 tree bases_vec = make_tree_vec (vector->length ());
4449
4450 for (i = 0; i < vector->length (); ++i)
4451 TREE_VEC_ELT (bases_vec, i) = BINFO_TYPE ((*vector)[i]);
4452
4453 return bases_vec;
4454 }
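/* An illustrative class hierarchy (not from the source):

     struct V { };  struct A { };  struct B { };
     struct D : A, virtual V, B { };

   __direct_bases (D) yields the pack (V, A, B): the virtual base first,
   in initialization order, followed by the non-virtual bases.  */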
4455
4456 /* Implement the __bases keyword: Return the base classes
4457 of type */
4458
4459 /* Find morally non-virtual base classes by walking binfo hierarchy */
4460 /* Virtual base classes are handled separately in finish_bases */
4461
4462 static tree
4463 dfs_calculate_bases_pre (tree binfo, void * /*data_*/)
4464 {
4465 /* Don't walk bases of virtual bases */
4466 return BINFO_VIRTUAL_P (binfo) ? dfs_skip_bases : NULL_TREE;
4467 }
4468
4469 static tree
4470 dfs_calculate_bases_post (tree binfo, void *data_)
4471 {
4472 vec<tree, va_gc> **data = ((vec<tree, va_gc> **) data_);
4473 if (!BINFO_VIRTUAL_P (binfo))
4474 vec_safe_push (*data, BINFO_TYPE (binfo));
4475 return NULL_TREE;
4476 }
4477
4478 /* Calculates the morally non-virtual base classes of a class */
4479 static vec<tree, va_gc> *
4480 calculate_bases_helper (tree type)
4481 {
4482 vec<tree, va_gc> *vector = make_tree_vector ();
4483
4484 /* Now add non-virtual base classes in order of construction */
4485 if (TYPE_BINFO (type))
4486 dfs_walk_all (TYPE_BINFO (type),
4487 dfs_calculate_bases_pre, dfs_calculate_bases_post, &vector);
4488 return vector;
4489 }
4490
4491 tree
4492 calculate_bases (tree type, tsubst_flags_t complain)
4493 {
4494 if (!complete_type_or_maybe_complain (type, NULL_TREE, complain)
4495 || !NON_UNION_CLASS_TYPE_P (type))
4496 return make_tree_vec (0);
4497
4498 releasing_vec vector;
4499 tree bases_vec = NULL_TREE;
4500 unsigned i;
4501 vec<tree, va_gc> *vbases;
4502 tree binfo;
4503
4504 /* First go through virtual base classes */
4505 for (vbases = CLASSTYPE_VBASECLASSES (type), i = 0;
4506 vec_safe_iterate (vbases, i, &binfo); i++)
4507 {
4508 releasing_vec vbase_bases
4509 = calculate_bases_helper (BINFO_TYPE (binfo));
4510 vec_safe_splice (vector, vbase_bases);
4511 }
4512
4513 /* Now for the non-virtual bases */
4514 releasing_vec nonvbases = calculate_bases_helper (type);
4515 vec_safe_splice (vector, nonvbases);
4516
4517 /* Note that during error recovery vector->length can even be zero. */
4518 if (vector->length () > 1)
4519 {
4520 /* Last element is entire class, so don't copy */
4521 bases_vec = make_tree_vec (vector->length () - 1);
4522
4523 for (i = 0; i < vector->length () - 1; ++i)
4524 TREE_VEC_ELT (bases_vec, i) = (*vector)[i];
4525 }
4526 else
4527 bases_vec = make_tree_vec (0);
4528
4529 return bases_vec;
4530 }
4531
4532 tree
4533 finish_bases (tree type, bool direct)
4534 {
4535 tree bases = NULL_TREE;
4536
4537 if (!processing_template_decl)
4538 {
4539 /* Parameter packs can only be used in templates */
4540 error ("parameter pack %<__bases%> only valid in template declaration");
4541 return error_mark_node;
4542 }
4543
4544 bases = cxx_make_type (BASES);
4545 BASES_TYPE (bases) = type;
4546 BASES_DIRECT (bases) = direct;
4547 SET_TYPE_STRUCTURAL_EQUALITY (bases);
4548
4549 return bases;
4550 }
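/* A hedged sketch of how the keyword is typically consumed (assuming
   <tuple> is available and the usual pack-expansion idiom):

     template <typename T>
     struct all_bases
     {
       typedef std::tuple<__bases (T)...> type;
     };

   Outside a template the keyword is rejected above, since a BASES node is
   only meaningful as a pack to expand during instantiation.  */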
4551
4552 /* Perform C++-specific checks for __builtin_offsetof before calling
4553 fold_offsetof. */
4554
4555 tree
4556 finish_offsetof (tree object_ptr, tree expr, location_t loc)
4557 {
4558 /* If we're processing a template, we can't finish the semantics yet.
4559 Otherwise we can fold the entire expression now. */
4560 if (processing_template_decl)
4561 {
4562 expr = build2 (OFFSETOF_EXPR, size_type_node, expr, object_ptr);
4563 SET_EXPR_LOCATION (expr, loc);
4564 return expr;
4565 }
4566
4567 if (expr == error_mark_node)
4568 return error_mark_node;
4569
4570 if (TREE_CODE (expr) == PSEUDO_DTOR_EXPR)
4571 {
4572 error ("cannot apply %<offsetof%> to destructor %<~%T%>",
4573 TREE_OPERAND (expr, 2));
4574 return error_mark_node;
4575 }
4576 if (FUNC_OR_METHOD_TYPE_P (TREE_TYPE (expr))
4577 || TREE_TYPE (expr) == unknown_type_node)
4578 {
4579 while (TREE_CODE (expr) == COMPONENT_REF
4580 || TREE_CODE (expr) == COMPOUND_EXPR)
4581 expr = TREE_OPERAND (expr, 1);
4582
4583 if (DECL_P (expr))
4584 {
4585 error ("cannot apply %<offsetof%> to member function %qD", expr);
4586 inform (DECL_SOURCE_LOCATION (expr), "declared here");
4587 }
4588 else
4589 error ("cannot apply %<offsetof%> to member function");
4590 return error_mark_node;
4591 }
4592 if (TREE_CODE (expr) == CONST_DECL)
4593 {
4594 error ("cannot apply %<offsetof%> to an enumerator %qD", expr);
4595 return error_mark_node;
4596 }
4597 if (REFERENCE_REF_P (expr))
4598 expr = TREE_OPERAND (expr, 0);
4599 if (!complete_type_or_else (TREE_TYPE (TREE_TYPE (object_ptr)), object_ptr))
4600 return error_mark_node;
4601 if (warn_invalid_offsetof
4602 && CLASS_TYPE_P (TREE_TYPE (TREE_TYPE (object_ptr)))
4603 && CLASSTYPE_NON_STD_LAYOUT (TREE_TYPE (TREE_TYPE (object_ptr)))
4604 && cp_unevaluated_operand == 0)
4605 warning_at (loc, OPT_Winvalid_offsetof, "%<offsetof%> within "
4606 "non-standard-layout type %qT is conditionally-supported",
4607 TREE_TYPE (TREE_TYPE (object_ptr)));
4608 return fold_offsetof (expr);
4609 }
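/* Illustrative uses and misuses (hedged, not from the testsuite):

     struct S { int i; void f (); };

     size_t ok  = __builtin_offsetof (S, i);   // folded to a constant
     size_t bad = __builtin_offsetof (S, f);   // error: member function

   A non-standard-layout class in the first operand only produces the
   -Winvalid-offsetof warning above, since the construct is merely
   conditionally-supported there.  */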
4610
4611 /* Replace the AGGR_INIT_EXPR at *TP with an equivalent CALL_EXPR. This
4612 function is broken out from the above for the benefit of the tree-ssa
4613 project. */
4614
4615 void
4616 simplify_aggr_init_expr (tree *tp)
4617 {
4618 tree aggr_init_expr = *tp;
4619
4620 /* Form an appropriate CALL_EXPR. */
4621 tree fn = AGGR_INIT_EXPR_FN (aggr_init_expr);
4622 tree slot = AGGR_INIT_EXPR_SLOT (aggr_init_expr);
4623 tree type = TREE_TYPE (slot);
4624
4625 tree call_expr;
4626 enum style_t { ctor, arg, pcc } style;
4627
4628 if (AGGR_INIT_VIA_CTOR_P (aggr_init_expr))
4629 style = ctor;
4630 #ifdef PCC_STATIC_STRUCT_RETURN
4631 else if (1)
4632 style = pcc;
4633 #endif
4634 else
4635 {
4636 gcc_assert (TREE_ADDRESSABLE (type));
4637 style = arg;
4638 }
4639
4640 call_expr = build_call_array_loc (input_location,
4641 TREE_TYPE (TREE_TYPE (TREE_TYPE (fn))),
4642 fn,
4643 aggr_init_expr_nargs (aggr_init_expr),
4644 AGGR_INIT_EXPR_ARGP (aggr_init_expr));
4645 TREE_NOTHROW (call_expr) = TREE_NOTHROW (aggr_init_expr);
4646 CALL_FROM_THUNK_P (call_expr) = AGGR_INIT_FROM_THUNK_P (aggr_init_expr);
4647 CALL_EXPR_OPERATOR_SYNTAX (call_expr)
4648 = CALL_EXPR_OPERATOR_SYNTAX (aggr_init_expr);
4649 CALL_EXPR_ORDERED_ARGS (call_expr) = CALL_EXPR_ORDERED_ARGS (aggr_init_expr);
4650 CALL_EXPR_REVERSE_ARGS (call_expr) = CALL_EXPR_REVERSE_ARGS (aggr_init_expr);
4651
4652 if (style == ctor)
4653 {
4654 /* Replace the first argument to the ctor with the address of the
4655 slot. */
4656 cxx_mark_addressable (slot);
4657 CALL_EXPR_ARG (call_expr, 0) =
4658 build1 (ADDR_EXPR, build_pointer_type (type), slot);
4659 }
4660 else if (style == arg)
4661 {
4662 /* Just mark it addressable here, and leave the rest to
4663 expand_call{,_inline}. */
4664 cxx_mark_addressable (slot);
4665 CALL_EXPR_RETURN_SLOT_OPT (call_expr) = true;
4666 call_expr = build2 (INIT_EXPR, TREE_TYPE (call_expr), slot, call_expr);
4667 }
4668 else if (style == pcc)
4669 {
4670 /* If we're using the non-reentrant PCC calling convention, then we
4671 need to copy the returned value out of the static buffer into the
4672 SLOT. */
4673 push_deferring_access_checks (dk_no_check);
4674 call_expr = build_aggr_init (slot, call_expr,
4675 DIRECT_BIND | LOOKUP_ONLYCONVERTING,
4676 tf_warning_or_error);
4677 pop_deferring_access_checks ();
4678 call_expr = build2 (COMPOUND_EXPR, TREE_TYPE (slot), call_expr, slot);
4679 }
4680
4681 if (AGGR_INIT_ZERO_FIRST (aggr_init_expr))
4682 {
4683 tree init = build_zero_init (type, NULL_TREE,
4684 /*static_storage_p=*/false);
4685 init = build2 (INIT_EXPR, void_type_node, slot, init);
4686 call_expr = build2 (COMPOUND_EXPR, TREE_TYPE (call_expr),
4687 init, call_expr);
4688 }
4689
4690 *tp = call_expr;
4691 }
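/* Conceptually (a hedged sketch, not the exact trees): for

     struct S { S (int); };
     S make ();
     S s = make ();

   an AGGR_INIT_EXPR initializing the slot 's' from the call to make is
   rewritten here into a plain CALL_EXPR that constructs its return value
   directly in the slot ('arg' style), while a constructor call instead
   passes the slot's address as the first argument ('ctor' style).  */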
4692
4693 /* Emit all thunks to FN that should be emitted when FN is emitted. */
4694
4695 void
4696 emit_associated_thunks (tree fn)
4697 {
4698 /* When we use vcall offsets, we emit thunks with the virtual
4699 functions to which they thunk. The whole point of vcall offsets
4700 is so that you can know statically the entire set of thunks that
4701 will ever be needed for a given virtual function, thereby
4702 enabling you to output all the thunks with the function itself. */
4703 if (DECL_VIRTUAL_P (fn)
4704 /* Do not emit thunks for extern template instantiations. */
4705 && ! DECL_REALLY_EXTERN (fn))
4706 {
4707 tree thunk;
4708
4709 for (thunk = DECL_THUNKS (fn); thunk; thunk = DECL_CHAIN (thunk))
4710 {
4711 if (!THUNK_ALIAS (thunk))
4712 {
4713 use_thunk (thunk, /*emit_p=*/1);
4714 if (DECL_RESULT_THUNK_P (thunk))
4715 {
4716 tree probe;
4717
4718 for (probe = DECL_THUNKS (thunk);
4719 probe; probe = DECL_CHAIN (probe))
4720 use_thunk (probe, /*emit_p=*/1);
4721 }
4722 }
4723 else
4724 gcc_assert (!DECL_THUNKS (thunk));
4725 }
4726 }
4727 }
4728
4729 /* Generate RTL for FN. */
4730
4731 bool
4732 expand_or_defer_fn_1 (tree fn)
4733 {
4734 /* When the parser calls us after finishing the body of a template
4735 function, we don't really want to expand the body. */
4736 if (processing_template_decl)
4737 {
4738 /* Normally, collection only occurs in rest_of_compilation. So,
4739 if we don't collect here, we never collect junk generated
4740 during the processing of templates until we hit a
4741 non-template function. It's not safe to do this inside a
4742 nested class, though, as the parser may have local state that
4743 is not a GC root. */
4744 if (!function_depth)
4745 ggc_collect ();
4746 return false;
4747 }
4748
4749 gcc_assert (DECL_SAVED_TREE (fn));
4750
4751 /* We make a decision about linkage for these functions at the end
4752 of the compilation. Until that point, we do not want the back
4753 end to output them -- but we do want it to see the bodies of
4754 these functions so that it can inline them as appropriate. */
4755 if (DECL_DECLARED_INLINE_P (fn) || DECL_IMPLICIT_INSTANTIATION (fn))
4756 {
4757 if (DECL_INTERFACE_KNOWN (fn))
4758 /* We've already made a decision as to how this function will
4759 be handled. */;
4760 else if (!at_eof
4761 || DECL_IMMEDIATE_FUNCTION_P (fn)
4762 || DECL_OMP_DECLARE_REDUCTION_P (fn))
4763 tentative_decl_linkage (fn);
4764 else
4765 import_export_decl (fn);
4766
4767 /* If the user wants us to keep all inline functions, then mark
4768 this function as needed so that finish_file will make sure to
4769 output it later. Similarly, all dllexport'd functions must
4770 be emitted; there may be callers in other DLLs. */
4771 if (DECL_DECLARED_INLINE_P (fn)
4772 && !DECL_REALLY_EXTERN (fn)
4773 && !DECL_IMMEDIATE_FUNCTION_P (fn)
4774 && !DECL_OMP_DECLARE_REDUCTION_P (fn)
4775 && (flag_keep_inline_functions
4776 || (flag_keep_inline_dllexport
4777 && lookup_attribute ("dllexport", DECL_ATTRIBUTES (fn)))))
4778 {
4779 mark_needed (fn);
4780 DECL_EXTERNAL (fn) = 0;
4781 }
4782 }
4783
4784 /* If this is a constructor or destructor body, we have to clone
4785 it. */
4786 if (maybe_clone_body (fn))
4787 {
4788 /* We don't want to process FN again, so pretend we've written
4789 it out, even though we haven't. */
4790 TREE_ASM_WRITTEN (fn) = 1;
4791 /* If this is a constexpr function, keep DECL_SAVED_TREE. */
4792 if (!DECL_DECLARED_CONSTEXPR_P (fn)
4793 && !(modules_p () && DECL_DECLARED_INLINE_P (fn)))
4794 DECL_SAVED_TREE (fn) = NULL_TREE;
4795 return false;
4796 }
4797
4798 /* There's no reason to do any of the work here if we're only doing
4799 semantic analysis; this code just generates RTL. */
4800 if (flag_syntax_only)
4801 {
4802 /* Pretend that this function has been written out so that we don't try
4803 to expand it again. */
4804 TREE_ASM_WRITTEN (fn) = 1;
4805 return false;
4806 }
4807
4808 if (DECL_OMP_DECLARE_REDUCTION_P (fn))
4809 return false;
4810
4811 return true;
4812 }
4813
4814 void
4815 expand_or_defer_fn (tree fn)
4816 {
4817 if (expand_or_defer_fn_1 (fn))
4818 {
4819 function_depth++;
4820
4821 /* Expand or defer, at the whim of the compilation unit manager. */
4822 cgraph_node::finalize_function (fn, function_depth > 1);
4823 emit_associated_thunks (fn);
4824
4825 function_depth--;
4826
4827 if (DECL_IMMEDIATE_FUNCTION_P (fn))
4828 {
4829 if (cgraph_node *node = cgraph_node::get (fn))
4830 {
4831 node->body_removed = true;
4832 node->analyzed = false;
4833 node->definition = false;
4834 node->force_output = false;
4835 }
4836 }
4837 }
4838 }
4839
4840 class nrv_data
4841 {
4842 public:
4843 nrv_data () : visited (37) {}
4844
4845 tree var;
4846 tree result;
4847 hash_table<nofree_ptr_hash <tree_node> > visited;
4848 bool in_nrv_cleanup;
4849 };
4850
4851 /* Helper function for walk_tree, used by finalize_nrv below. */
4852
4853 static tree
4854 finalize_nrv_r (tree* tp, int* walk_subtrees, void* data)
4855 {
4856 class nrv_data *dp = (class nrv_data *)data;
4857 tree_node **slot;
4858
4859 /* No need to walk into types. There wouldn't be any need to walk into
4860 non-statements, except that we have to consider STMT_EXPRs. */
4861 if (TYPE_P (*tp))
4862 *walk_subtrees = 0;
4863 /* Change all returns to just refer to the RESULT_DECL; this is a nop,
4864 but differs from using NULL_TREE in that it indicates that we care
4865 about the value of the RESULT_DECL. But preserve anything appended
4866 by check_return_expr. */
4867 else if (TREE_CODE (*tp) == RETURN_EXPR)
4868 {
4869 tree *p = &TREE_OPERAND (*tp, 0);
4870 while (TREE_CODE (*p) == COMPOUND_EXPR)
4871 p = &TREE_OPERAND (*p, 0);
4872 gcc_checking_assert (TREE_CODE (*p) == INIT_EXPR
4873 && TREE_OPERAND (*p, 0) == dp->result);
4874 *p = dp->result;
4875 }
4876 /* Change all cleanups for the NRV to only run when an exception is
4877 thrown. */
4878 else if (TREE_CODE (*tp) == CLEANUP_STMT
4879 && CLEANUP_DECL (*tp) == dp->var)
4880 {
4881 dp->in_nrv_cleanup = true;
4882 cp_walk_tree (&CLEANUP_BODY (*tp), finalize_nrv_r, data, 0);
4883 dp->in_nrv_cleanup = false;
4884 cp_walk_tree (&CLEANUP_EXPR (*tp), finalize_nrv_r, data, 0);
4885 *walk_subtrees = 0;
4886
4887 CLEANUP_EH_ONLY (*tp) = true;
4888
4889 /* If a cleanup might throw, we need to clear current_retval_sentinel on
4890 the exception path so an outer cleanup added by
4891 maybe_splice_retval_cleanup doesn't run. */
4892 if (current_retval_sentinel
4893 && cp_function_chain->throwing_cleanup)
4894 {
4895 tree clear = build2 (MODIFY_EXPR, boolean_type_node,
4896 current_retval_sentinel,
4897 boolean_false_node);
4898
4899 /* We're already only on the EH path, just prepend it. */
4900 tree &exp = CLEANUP_EXPR (*tp);
4901 exp = build2 (COMPOUND_EXPR, void_type_node, clear, exp);
4902 }
4903 }
4904 /* Disable maybe_splice_retval_cleanup within the NRV cleanup scope, we don't
4905 want to destroy the retval before the variable goes out of scope. */
4906 else if (TREE_CODE (*tp) == CLEANUP_STMT
4907 && dp->in_nrv_cleanup
4908 && CLEANUP_DECL (*tp) == dp->result)
4909 CLEANUP_EXPR (*tp) = void_node;
4910 /* Replace the DECL_EXPR for the NRV with an initialization of the
4911 RESULT_DECL, if needed. */
4912 else if (TREE_CODE (*tp) == DECL_EXPR
4913 && DECL_EXPR_DECL (*tp) == dp->var)
4914 {
4915 tree init;
4916 if (DECL_INITIAL (dp->var)
4917 && DECL_INITIAL (dp->var) != error_mark_node)
4918 init = build2 (INIT_EXPR, void_type_node, dp->result,
4919 DECL_INITIAL (dp->var));
4920 else
4921 init = build_empty_stmt (EXPR_LOCATION (*tp));
4922 DECL_INITIAL (dp->var) = NULL_TREE;
4923 SET_EXPR_LOCATION (init, EXPR_LOCATION (*tp));
4924 *tp = init;
4925 }
4926 /* And replace all uses of the NRV with the RESULT_DECL. */
4927 else if (*tp == dp->var)
4928 *tp = dp->result;
4929
4930 /* Avoid walking into the same tree more than once. Unfortunately, we
4931 can't just use walk_tree_without_duplicates because it would only call
4932 us for the first occurrence of dp->var in the function body. */
4933 slot = dp->visited.find_slot (*tp, INSERT);
4934 if (*slot)
4935 *walk_subtrees = 0;
4936 else
4937 *slot = *tp;
4938
4939 /* Keep iterating. */
4940 return NULL_TREE;
4941 }
4942
4943 /* Called from finish_function to implement the named return value
4944 optimization by overriding all the RETURN_EXPRs and pertinent
4945 CLEANUP_STMTs and replacing all occurrences of VAR with RESULT, the
4946 RESULT_DECL for the function. */
4947
4948 void
4949 finalize_nrv (tree *tp, tree var, tree result)
4950 {
4951 class nrv_data data;
4952
4953 /* Copy name from VAR to RESULT. */
4954 DECL_NAME (result) = DECL_NAME (var);
4955 /* Don't forget that we take its address. */
4956 TREE_ADDRESSABLE (result) = TREE_ADDRESSABLE (var);
4957 /* Finally set DECL_VALUE_EXPR to avoid assigning
4958 a stack slot at -O0 for the original var and debug info
4959 uses RESULT location for VAR. */
4960 SET_DECL_VALUE_EXPR (var, result);
4961 DECL_HAS_VALUE_EXPR_P (var) = 1;
4962
4963 data.var = var;
4964 data.result = result;
4965 data.in_nrv_cleanup = false;
4966 cp_walk_tree (tp, finalize_nrv_r, &data, 0);
4967 }
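/* A hedged example of the transformation: in

     struct T { T (); ~T (); };

     T f ()
     {
       T t;
       return t;    // named return value
     }

   't' is replaced by the function's RESULT_DECL, the return statement ends
   up referring to that RESULT_DECL directly, and t's cleanup becomes
   EH-only, so no copy or extra destruction happens on the normal path.  */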
4968
4969 /* Create CP_OMP_CLAUSE_INFO for clause C. Returns true if it is invalid. */
4970
4971 bool
4972 cxx_omp_create_clause_info (tree c, tree type, bool need_default_ctor,
4973 bool need_copy_ctor, bool need_copy_assignment,
4974 bool need_dtor)
4975 {
4976 int save_errorcount = errorcount;
4977 tree info, t;
4978
4979 /* Always allocate 3 elements for simplicity. These are the
4980 function decls for the ctor, dtor, and assignment op.
4981 This layout is known to the three lang hooks,
4982 cxx_omp_clause_default_init, cxx_omp_clause_copy_init,
4983 and cxx_omp_clause_assign_op. */
4984 info = make_tree_vec (3);
4985 CP_OMP_CLAUSE_INFO (c) = info;
4986
4987 if (need_default_ctor || need_copy_ctor)
4988 {
4989 if (need_default_ctor)
4990 t = get_default_ctor (type);
4991 else
4992 t = get_copy_ctor (type, tf_warning_or_error);
4993
4994 if (t && !trivial_fn_p (t))
4995 TREE_VEC_ELT (info, 0) = t;
4996 }
4997
4998 if (need_dtor && TYPE_HAS_NONTRIVIAL_DESTRUCTOR (type))
4999 TREE_VEC_ELT (info, 1) = get_dtor (type, tf_warning_or_error);
5000
5001 if (need_copy_assignment)
5002 {
5003 t = get_copy_assign (type);
5004
5005 if (t && !trivial_fn_p (t))
5006 TREE_VEC_ELT (info, 2) = t;
5007 }
5008
5009 return errorcount != save_errorcount;
5010 }
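/* A small illustration (assumed usage): for

     struct S { S (); S (const S &); ~S (); };
     void g (S s)
     {
     #pragma omp parallel firstprivate (s)
       ;
     }

   the firstprivate clause needs the copy constructor (slot 0) and the
   destructor (slot 1); the copy assignment operator (slot 2) would be
   needed for clauses such as lastprivate instead.  */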
5011
5012 /* If DECL is DECL_OMP_PRIVATIZED_MEMBER, return corresponding
5013 FIELD_DECL, otherwise return DECL itself. */
5014
5015 static tree
5016 omp_clause_decl_field (tree decl)
5017 {
5018 if (VAR_P (decl)
5019 && DECL_HAS_VALUE_EXPR_P (decl)
5020 && DECL_ARTIFICIAL (decl)
5021 && DECL_LANG_SPECIFIC (decl)
5022 && DECL_OMP_PRIVATIZED_MEMBER (decl))
5023 {
5024 tree f = DECL_VALUE_EXPR (decl);
5025 if (INDIRECT_REF_P (f))
5026 f = TREE_OPERAND (f, 0);
5027 if (TREE_CODE (f) == COMPONENT_REF)
5028 {
5029 f = TREE_OPERAND (f, 1);
5030 gcc_assert (TREE_CODE (f) == FIELD_DECL);
5031 return f;
5032 }
5033 }
5034 return NULL_TREE;
5035 }
5036
5037 /* Adjust DECL if needed for printing using %qE. */
5038
5039 static tree
5040 omp_clause_printable_decl (tree decl)
5041 {
5042 tree t = omp_clause_decl_field (decl);
5043 if (t)
5044 return t;
5045 return decl;
5046 }
5047
5048 /* For a FIELD_DECL F and corresponding DECL_OMP_PRIVATIZED_MEMBER
5049 VAR_DECL T that doesn't need a DECL_EXPR added, record it for
5050 privatization. */
5051
5052 static void
5053 omp_note_field_privatization (tree f, tree t)
5054 {
5055 if (!omp_private_member_map)
5056 omp_private_member_map = new hash_map<tree, tree>;
5057 tree &v = omp_private_member_map->get_or_insert (f);
5058 if (v == NULL_TREE)
5059 {
5060 v = t;
5061 omp_private_member_vec.safe_push (f);
5062 /* Signal that we don't want to create DECL_EXPR for this dummy var. */
5063 omp_private_member_vec.safe_push (integer_zero_node);
5064 }
5065 }
5066
5067 /* Privatize FIELD_DECL T, return corresponding DECL_OMP_PRIVATIZED_MEMBER
5068 dummy VAR_DECL. */
5069
5070 tree
5071 omp_privatize_field (tree t, bool shared)
5072 {
5073 tree m = finish_non_static_data_member (t, NULL_TREE, NULL_TREE);
5074 if (m == error_mark_node)
5075 return error_mark_node;
5076 if (!omp_private_member_map && !shared)
5077 omp_private_member_map = new hash_map<tree, tree>;
5078 if (TYPE_REF_P (TREE_TYPE (t)))
5079 {
5080 gcc_assert (INDIRECT_REF_P (m));
5081 m = TREE_OPERAND (m, 0);
5082 }
5083 tree vb = NULL_TREE;
5084 tree &v = shared ? vb : omp_private_member_map->get_or_insert (t);
5085 if (v == NULL_TREE)
5086 {
5087 v = create_temporary_var (TREE_TYPE (m));
5088 retrofit_lang_decl (v);
5089 DECL_OMP_PRIVATIZED_MEMBER (v) = 1;
5090 SET_DECL_VALUE_EXPR (v, m);
5091 DECL_HAS_VALUE_EXPR_P (v) = 1;
5092 if (!shared)
5093 omp_private_member_vec.safe_push (t);
5094 }
5095 return v;
5096 }
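/* A hedged example of member privatization: inside

     struct A
     {
       int n;
       void work ()
       {
     #pragma omp parallel for private (n)
	 for (int i = 0; i < 10; ++i)
	   n = i;
       }
     };

   the clause names the non-static data member n; omp_privatize_field wraps
   it in an artificial VAR_DECL whose DECL_VALUE_EXPR is this->n, and that
   dummy variable is what is actually privatized later.  */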
5097
5098 /* Helper function for handle_omp_array_sections. Called recursively
5099 to handle multiple array-section-subscripts. C is the clause,
5100 T current expression (initially OMP_CLAUSE_DECL), which is either
5101 a TREE_LIST for array-section-subscript (TREE_PURPOSE is low-bound
5102 expression if specified, TREE_VALUE length expression if specified,
5103 TREE_CHAIN is what it has been specified after), or some decl.
5104 TYPES vector is populated with array section types, MAYBE_ZERO_LEN
5105 set to true if any of the array-section-subscript could have length
5106 of zero (explicit or implicit), FIRST_NON_ONE is the index of the
5107 first array-section-subscript which is known not to have length
5108 of one. Given say:
5109 map(a[:b][2:1][:c][:2][:d][e:f][2:5])
5110 FIRST_NON_ONE will be 3, array-section-subscript [:b], [2:1] and [:c]
5111 all are or may have length of 1, array-section-subscript [:2] is the
5112 first one known not to have length 1. For array-section-subscript
5113 <= FIRST_NON_ONE we diagnose non-contiguous arrays if low bound isn't
5114    0 or length isn't the array domain max + 1; for > FIRST_NON_ONE we
5115    can do so only if MAYBE_ZERO_LEN is false. MAYBE_ZERO_LEN will be true in the above
5116 case though, as some lengths could be zero. */
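/* Editor's sketch (hypothetical example, inferred from the recursion
   below): for

     int a[10][20];
     #pragma omp target map(tofrom: a[1:2][0:20])

   OMP_CLAUSE_DECL is a TREE_LIST whose head describes the last
   array-section-subscript and whose TREE_CHAIN leads through the earlier
   subscripts down to the base expression, roughly

     TREE_LIST (purpose 0, value 20,
		chain: TREE_LIST (purpose 1, value 2, chain: a))

   so the recursion reaches the base decl first and then applies each low
   bound on the way back out via grok_array_decl.  */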
5117
5118 static tree
5119 handle_omp_array_sections_1 (tree c, tree t, vec<tree> &types,
5120 bool &maybe_zero_len, unsigned int &first_non_one,
5121 enum c_omp_region_type ort)
5122 {
5123 tree ret, low_bound, length, type;
5124 if (TREE_CODE (t) != TREE_LIST)
5125 {
5126 if (error_operand_p (t))
5127 return error_mark_node;
5128 if (REFERENCE_REF_P (t)
5129 && TREE_CODE (TREE_OPERAND (t, 0)) == COMPONENT_REF)
5130 t = TREE_OPERAND (t, 0);
5131 ret = t;
5132 while (TREE_CODE (t) == INDIRECT_REF)
5133 {
5134 t = TREE_OPERAND (t, 0);
5135 STRIP_NOPS (t);
5136 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
5137 t = TREE_OPERAND (t, 0);
5138 }
5139 while (TREE_CODE (t) == COMPOUND_EXPR)
5140 {
5141 t = TREE_OPERAND (t, 1);
5142 STRIP_NOPS (t);
5143 }
5144 if (TREE_CODE (t) == COMPONENT_REF
5145 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
5146 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO
5147 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FROM)
5148 && !type_dependent_expression_p (t))
5149 {
5150 if (TREE_CODE (TREE_OPERAND (t, 1)) == FIELD_DECL
5151 && DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
5152 {
5153 error_at (OMP_CLAUSE_LOCATION (c),
5154 "bit-field %qE in %qs clause",
5155 t, omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
5156 return error_mark_node;
5157 }
5158 while (TREE_CODE (t) == COMPONENT_REF)
5159 {
5160 if (TREE_TYPE (TREE_OPERAND (t, 0))
5161 && TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0))) == UNION_TYPE)
5162 {
5163 error_at (OMP_CLAUSE_LOCATION (c),
5164 "%qE is a member of a union", t);
5165 return error_mark_node;
5166 }
5167 t = TREE_OPERAND (t, 0);
5168 while (TREE_CODE (t) == MEM_REF
5169 || TREE_CODE (t) == INDIRECT_REF
5170 || TREE_CODE (t) == ARRAY_REF)
5171 {
5172 t = TREE_OPERAND (t, 0);
5173 STRIP_NOPS (t);
5174 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
5175 t = TREE_OPERAND (t, 0);
5176 }
5177 }
5178 if (REFERENCE_REF_P (t))
5179 t = TREE_OPERAND (t, 0);
5180 }
5181 if (TREE_CODE (t) == FIELD_DECL)
5182 ret = finish_non_static_data_member (t, NULL_TREE, NULL_TREE);
5183 else if (!VAR_P (t) && TREE_CODE (t) != PARM_DECL)
5184 {
5185 if (processing_template_decl && TREE_CODE (t) != OVERLOAD)
5186 return NULL_TREE;
5187 if (DECL_P (t))
5188 error_at (OMP_CLAUSE_LOCATION (c),
5189 "%qD is not a variable in %qs clause", t,
5190 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
5191 else
5192 error_at (OMP_CLAUSE_LOCATION (c),
5193 "%qE is not a variable in %qs clause", t,
5194 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
5195 return error_mark_node;
5196 }
5197 else if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_AFFINITY
5198 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
5199 && VAR_P (t) && CP_DECL_THREAD_LOCAL_P (t))
5200 {
5201 error_at (OMP_CLAUSE_LOCATION (c),
5202 "%qD is threadprivate variable in %qs clause", t,
5203 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
5204 return error_mark_node;
5205 }
5206 if (type_dependent_expression_p (ret))
5207 return NULL_TREE;
5208 ret = convert_from_reference (ret);
5209 return ret;
5210 }
5211
5212 if ((ort & C_ORT_OMP_DECLARE_SIMD) == C_ORT_OMP
5213 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5214 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
5215 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
5216 && TREE_CODE (TREE_CHAIN (t)) == FIELD_DECL)
5217 TREE_CHAIN (t) = omp_privatize_field (TREE_CHAIN (t), false);
5218 ret = handle_omp_array_sections_1 (c, TREE_CHAIN (t), types,
5219 maybe_zero_len, first_non_one, ort);
5220 if (ret == error_mark_node || ret == NULL_TREE)
5221 return ret;
5222
5223 type = TREE_TYPE (ret);
5224 low_bound = TREE_PURPOSE (t);
5225 length = TREE_VALUE (t);
5226 if ((low_bound && type_dependent_expression_p (low_bound))
5227 || (length && type_dependent_expression_p (length)))
5228 return NULL_TREE;
5229
5230 if (low_bound == error_mark_node || length == error_mark_node)
5231 return error_mark_node;
5232
5233 if (low_bound && !INTEGRAL_TYPE_P (TREE_TYPE (low_bound)))
5234 {
5235 error_at (OMP_CLAUSE_LOCATION (c),
5236 "low bound %qE of array section does not have integral type",
5237 low_bound);
5238 return error_mark_node;
5239 }
5240 if (length && !INTEGRAL_TYPE_P (TREE_TYPE (length)))
5241 {
5242 error_at (OMP_CLAUSE_LOCATION (c),
5243 "length %qE of array section does not have integral type",
5244 length);
5245 return error_mark_node;
5246 }
5247 if (low_bound)
5248 low_bound = mark_rvalue_use (low_bound);
5249 if (length)
5250 length = mark_rvalue_use (length);
5251 /* We need to reduce to real constant-values for checks below. */
5252 if (length)
5253 length = fold_simple (length);
5254 if (low_bound)
5255 low_bound = fold_simple (low_bound);
5256 if (low_bound
5257 && TREE_CODE (low_bound) == INTEGER_CST
5258 && TYPE_PRECISION (TREE_TYPE (low_bound))
5259 > TYPE_PRECISION (sizetype))
5260 low_bound = fold_convert (sizetype, low_bound);
5261 if (length
5262 && TREE_CODE (length) == INTEGER_CST
5263 && TYPE_PRECISION (TREE_TYPE (length))
5264 > TYPE_PRECISION (sizetype))
5265 length = fold_convert (sizetype, length);
5266 if (low_bound == NULL_TREE)
5267 low_bound = integer_zero_node;
5268
5269 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
5270 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
5271 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH))
5272 {
5273 if (length != integer_one_node)
5274 {
5275 error_at (OMP_CLAUSE_LOCATION (c),
5276 "expected single pointer in %qs clause",
5277 user_omp_clause_code_name (c, ort == C_ORT_ACC));
5278 return error_mark_node;
5279 }
5280 }
5281 if (length != NULL_TREE)
5282 {
5283 if (!integer_nonzerop (length))
5284 {
5285 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_AFFINITY
5286 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
5287 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5288 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
5289 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
5290 {
5291 if (integer_zerop (length))
5292 {
5293 error_at (OMP_CLAUSE_LOCATION (c),
5294 "zero length array section in %qs clause",
5295 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
5296 return error_mark_node;
5297 }
5298 }
5299 else
5300 maybe_zero_len = true;
5301 }
5302 if (first_non_one == types.length ()
5303 && (TREE_CODE (length) != INTEGER_CST || integer_onep (length)))
5304 first_non_one++;
5305 }
5306 if (TREE_CODE (type) == ARRAY_TYPE)
5307 {
5308 if (length == NULL_TREE
5309 && (TYPE_DOMAIN (type) == NULL_TREE
5310 || TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == NULL_TREE))
5311 {
5312 error_at (OMP_CLAUSE_LOCATION (c),
5313 "for unknown bound array type length expression must "
5314 "be specified");
5315 return error_mark_node;
5316 }
5317 if (TREE_CODE (low_bound) == INTEGER_CST
5318 && tree_int_cst_sgn (low_bound) == -1)
5319 {
5320 error_at (OMP_CLAUSE_LOCATION (c),
5321 "negative low bound in array section in %qs clause",
5322 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
5323 return error_mark_node;
5324 }
5325 if (length != NULL_TREE
5326 && TREE_CODE (length) == INTEGER_CST
5327 && tree_int_cst_sgn (length) == -1)
5328 {
5329 error_at (OMP_CLAUSE_LOCATION (c),
5330 "negative length in array section in %qs clause",
5331 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
5332 return error_mark_node;
5333 }
5334 if (TYPE_DOMAIN (type)
5335 && TYPE_MAX_VALUE (TYPE_DOMAIN (type))
5336 && TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (type)))
5337 == INTEGER_CST)
5338 {
5339 tree size
5340 = fold_convert (sizetype, TYPE_MAX_VALUE (TYPE_DOMAIN (type)));
5341 size = size_binop (PLUS_EXPR, size, size_one_node);
5342 if (TREE_CODE (low_bound) == INTEGER_CST)
5343 {
5344 if (tree_int_cst_lt (size, low_bound))
5345 {
5346 error_at (OMP_CLAUSE_LOCATION (c),
5347 "low bound %qE above array section size "
5348 "in %qs clause", low_bound,
5349 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
5350 return error_mark_node;
5351 }
5352 if (tree_int_cst_equal (size, low_bound))
5353 {
5354 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_AFFINITY
5355 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
5356 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5357 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
5358 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
5359 {
5360 error_at (OMP_CLAUSE_LOCATION (c),
5361 "zero length array section in %qs clause",
5362 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
5363 return error_mark_node;
5364 }
5365 maybe_zero_len = true;
5366 }
5367 else if (length == NULL_TREE
5368 && first_non_one == types.length ()
5369 && tree_int_cst_equal
5370 (TYPE_MAX_VALUE (TYPE_DOMAIN (type)),
5371 low_bound))
5372 first_non_one++;
5373 }
5374 else if (length == NULL_TREE)
5375 {
5376 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_AFFINITY
5377 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
5378 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
5379 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION
5380 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_TASK_REDUCTION)
5381 maybe_zero_len = true;
5382 if (first_non_one == types.length ())
5383 first_non_one++;
5384 }
5385 if (length && TREE_CODE (length) == INTEGER_CST)
5386 {
5387 if (tree_int_cst_lt (size, length))
5388 {
5389 error_at (OMP_CLAUSE_LOCATION (c),
5390 "length %qE above array section size "
5391 "in %qs clause", length,
5392 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
5393 return error_mark_node;
5394 }
5395 if (TREE_CODE (low_bound) == INTEGER_CST)
5396 {
5397 tree lbpluslen
5398 = size_binop (PLUS_EXPR,
5399 fold_convert (sizetype, low_bound),
5400 fold_convert (sizetype, length));
5401 if (TREE_CODE (lbpluslen) == INTEGER_CST
5402 && tree_int_cst_lt (size, lbpluslen))
5403 {
5404 error_at (OMP_CLAUSE_LOCATION (c),
5405 "high bound %qE above array section size "
5406 "in %qs clause", lbpluslen,
5407 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
5408 return error_mark_node;
5409 }
5410 }
5411 }
5412 }
5413 else if (length == NULL_TREE)
5414 {
5415 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_AFFINITY
5416 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
5417 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
5418 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION
5419 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_TASK_REDUCTION)
5420 maybe_zero_len = true;
5421 if (first_non_one == types.length ())
5422 first_non_one++;
5423 }
5424
5425 /* For [lb:] we will need to evaluate lb more than once. */
5426 if (length == NULL_TREE && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
5427 {
5428 tree lb = cp_save_expr (low_bound);
5429 if (lb != low_bound)
5430 {
5431 TREE_PURPOSE (t) = lb;
5432 low_bound = lb;
5433 }
5434 }
5435 }
5436 else if (TYPE_PTR_P (type))
5437 {
5438 if (length == NULL_TREE)
5439 {
5440 if (TREE_CODE (ret) == PARM_DECL && DECL_ARRAY_PARAMETER_P (ret))
5441 error_at (OMP_CLAUSE_LOCATION (c),
5442 "for array function parameter length expression "
5443 "must be specified");
5444 else
5445 error_at (OMP_CLAUSE_LOCATION (c),
5446 "for pointer type length expression must be specified");
5447 return error_mark_node;
5448 }
5449 if (length != NULL_TREE
5450 && TREE_CODE (length) == INTEGER_CST
5451 && tree_int_cst_sgn (length) == -1)
5452 {
5453 error_at (OMP_CLAUSE_LOCATION (c),
5454 "negative length in array section in %qs clause",
5455 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
5456 return error_mark_node;
5457 }
5458 /* If there is a pointer type anywhere but in the very first
5459 array-section-subscript, the array section could be non-contiguous. */
5460 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_AFFINITY
5461 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
5462 && TREE_CODE (TREE_CHAIN (t)) == TREE_LIST)
5463 {
5464 /* If any prior dimension has a non-one length, then deem this
5465 array section as non-contiguous. */
5466 for (tree d = TREE_CHAIN (t); TREE_CODE (d) == TREE_LIST;
5467 d = TREE_CHAIN (d))
5468 {
5469 tree d_length = TREE_VALUE (d);
5470 if (d_length == NULL_TREE || !integer_onep (d_length))
5471 {
5472 error_at (OMP_CLAUSE_LOCATION (c),
5473 "array section is not contiguous in %qs clause",
5474 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
5475 return error_mark_node;
5476 }
5477 }
5478 }
5479 }
5480 else
5481 {
5482 error_at (OMP_CLAUSE_LOCATION (c),
5483 "%qE does not have pointer or array type", ret);
5484 return error_mark_node;
5485 }
5486 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
5487 types.safe_push (TREE_TYPE (ret));
5488 /* We will need to evaluate lb more than once. */
5489 tree lb = cp_save_expr (low_bound);
5490 if (lb != low_bound)
5491 {
5492 TREE_PURPOSE (t) = lb;
5493 low_bound = lb;
5494 }
5495 /* Temporarily disable -fstrong-eval-order for array reductions.
5496 The SAVE_EXPR and COMPOUND_EXPR added if low_bound has side-effects
5497 is something the middle-end can't cope with and more importantly,
5498 it needs to be the actual base variable that is privatized, not some
5499    temporary assigned the previous value of it.  That, together with OpenMP
5500    leaving it unspecified how many times the side-effects are evaluated,
5501 makes int *a, *b; ... reduction(+:a[a = b, 3:10]) really unspecified. */
5502 warning_sentinel s (flag_strong_eval_order,
5503 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5504 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
5505 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION);
5506 ret = grok_array_decl (OMP_CLAUSE_LOCATION (c), ret, low_bound, NULL,
5507 tf_warning_or_error);
5508 return ret;
5509 }
5510
5511 /* Handle array sections for clause C. */
5512
5513 static bool
5514 handle_omp_array_sections (tree c, enum c_omp_region_type ort)
5515 {
5516 bool maybe_zero_len = false;
5517 unsigned int first_non_one = 0;
5518 auto_vec<tree, 10> types;
5519 tree *tp = &OMP_CLAUSE_DECL (c);
5520 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
5521 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_AFFINITY)
5522 && TREE_CODE (*tp) == TREE_LIST
5523 && TREE_PURPOSE (*tp)
5524 && TREE_CODE (TREE_PURPOSE (*tp)) == TREE_VEC)
5525 tp = &TREE_VALUE (*tp);
5526 tree first = handle_omp_array_sections_1 (c, *tp, types,
5527 maybe_zero_len, first_non_one,
5528 ort);
5529 if (first == error_mark_node)
5530 return true;
5531 if (first == NULL_TREE)
5532 return false;
5533 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
5534 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_AFFINITY)
5535 {
5536 tree t = *tp;
5537 tree tem = NULL_TREE;
5538 if (processing_template_decl)
5539 return false;
5540 /* Need to evaluate side effects in the length expressions
5541 if any. */
5542 while (TREE_CODE (t) == TREE_LIST)
5543 {
5544 if (TREE_VALUE (t) && TREE_SIDE_EFFECTS (TREE_VALUE (t)))
5545 {
5546 if (tem == NULL_TREE)
5547 tem = TREE_VALUE (t);
5548 else
5549 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem),
5550 TREE_VALUE (t), tem);
5551 }
5552 t = TREE_CHAIN (t);
5553 }
5554 if (tem)
5555 first = build2 (COMPOUND_EXPR, TREE_TYPE (first), tem, first);
5556 *tp = first;
5557 }
5558 else
5559 {
5560 unsigned int num = types.length (), i;
5561 tree t, side_effects = NULL_TREE, size = NULL_TREE;
5562 tree condition = NULL_TREE;
5563
5564 if (int_size_in_bytes (TREE_TYPE (first)) <= 0)
5565 maybe_zero_len = true;
5566 if (processing_template_decl && maybe_zero_len)
5567 return false;
5568
5569 for (i = num, t = OMP_CLAUSE_DECL (c); i > 0;
5570 t = TREE_CHAIN (t))
5571 {
5572 tree low_bound = TREE_PURPOSE (t);
5573 tree length = TREE_VALUE (t);
5574
5575 i--;
5576 if (low_bound
5577 && TREE_CODE (low_bound) == INTEGER_CST
5578 && TYPE_PRECISION (TREE_TYPE (low_bound))
5579 > TYPE_PRECISION (sizetype))
5580 low_bound = fold_convert (sizetype, low_bound);
5581 if (length
5582 && TREE_CODE (length) == INTEGER_CST
5583 && TYPE_PRECISION (TREE_TYPE (length))
5584 > TYPE_PRECISION (sizetype))
5585 length = fold_convert (sizetype, length);
5586 if (low_bound == NULL_TREE)
5587 low_bound = integer_zero_node;
5588 if (!maybe_zero_len && i > first_non_one)
5589 {
5590 if (integer_nonzerop (low_bound))
5591 goto do_warn_noncontiguous;
5592 if (length != NULL_TREE
5593 && TREE_CODE (length) == INTEGER_CST
5594 && TYPE_DOMAIN (types[i])
5595 && TYPE_MAX_VALUE (TYPE_DOMAIN (types[i]))
5596 && TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (types[i])))
5597 == INTEGER_CST)
5598 {
5599 tree size;
5600 size = size_binop (PLUS_EXPR,
5601 TYPE_MAX_VALUE (TYPE_DOMAIN (types[i])),
5602 size_one_node);
5603 if (!tree_int_cst_equal (length, size))
5604 {
5605 do_warn_noncontiguous:
5606 error_at (OMP_CLAUSE_LOCATION (c),
5607 "array section is not contiguous in %qs "
5608 "clause",
5609 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
5610 return true;
5611 }
5612 }
5613 if (!processing_template_decl
5614 && length != NULL_TREE
5615 && TREE_SIDE_EFFECTS (length))
5616 {
5617 if (side_effects == NULL_TREE)
5618 side_effects = length;
5619 else
5620 side_effects = build2 (COMPOUND_EXPR,
5621 TREE_TYPE (side_effects),
5622 length, side_effects);
5623 }
5624 }
5625 else if (processing_template_decl)
5626 continue;
5627 else
5628 {
5629 tree l;
5630
5631 if (i > first_non_one
5632 && ((length && integer_nonzerop (length))
5633 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5634 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
5635 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION))
5636 continue;
5637 if (length)
5638 l = fold_convert (sizetype, length);
5639 else
5640 {
5641 l = size_binop (PLUS_EXPR,
5642 TYPE_MAX_VALUE (TYPE_DOMAIN (types[i])),
5643 size_one_node);
5644 l = size_binop (MINUS_EXPR, l,
5645 fold_convert (sizetype, low_bound));
5646 }
5647 if (i > first_non_one)
5648 {
5649 l = fold_build2 (NE_EXPR, boolean_type_node, l,
5650 size_zero_node);
5651 if (condition == NULL_TREE)
5652 condition = l;
5653 else
5654 condition = fold_build2 (BIT_AND_EXPR, boolean_type_node,
5655 l, condition);
5656 }
5657 else if (size == NULL_TREE)
5658 {
5659 size = size_in_bytes (TREE_TYPE (types[i]));
5660 tree eltype = TREE_TYPE (types[num - 1]);
5661 while (TREE_CODE (eltype) == ARRAY_TYPE)
5662 eltype = TREE_TYPE (eltype);
5663 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5664 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
5665 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
5666 size = size_binop (EXACT_DIV_EXPR, size,
5667 size_in_bytes (eltype));
5668 size = size_binop (MULT_EXPR, size, l);
5669 if (condition)
5670 size = fold_build3 (COND_EXPR, sizetype, condition,
5671 size, size_zero_node);
5672 }
5673 else
5674 size = size_binop (MULT_EXPR, size, l);
5675 }
5676 }
5677 if (!processing_template_decl)
5678 {
5679 if (side_effects)
5680 size = build2 (COMPOUND_EXPR, sizetype, side_effects, size);
5681 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5682 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
5683 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
5684 {
5685 size = size_binop (MINUS_EXPR, size, size_one_node);
5686 size = save_expr (size);
5687 tree index_type = build_index_type (size);
5688 tree eltype = TREE_TYPE (first);
5689 while (TREE_CODE (eltype) == ARRAY_TYPE)
5690 eltype = TREE_TYPE (eltype);
5691 tree type = build_array_type (eltype, index_type);
5692 tree ptype = build_pointer_type (eltype);
5693 if (TYPE_REF_P (TREE_TYPE (t))
5694 && INDIRECT_TYPE_P (TREE_TYPE (TREE_TYPE (t))))
5695 t = convert_from_reference (t);
5696 else if (TREE_CODE (TREE_TYPE (t)) == ARRAY_TYPE)
5697 t = build_fold_addr_expr (t);
5698 tree t2 = build_fold_addr_expr (first);
5699 t2 = fold_convert_loc (OMP_CLAUSE_LOCATION (c),
5700 ptrdiff_type_node, t2);
5701 t2 = fold_build2_loc (OMP_CLAUSE_LOCATION (c), MINUS_EXPR,
5702 ptrdiff_type_node, t2,
5703 fold_convert_loc (OMP_CLAUSE_LOCATION (c),
5704 ptrdiff_type_node, t));
5705 if (tree_fits_shwi_p (t2))
5706 t = build2 (MEM_REF, type, t,
5707 build_int_cst (ptype, tree_to_shwi (t2)));
5708 else
5709 {
5710 t2 = fold_convert_loc (OMP_CLAUSE_LOCATION (c),
5711 sizetype, t2);
5712 t = build2_loc (OMP_CLAUSE_LOCATION (c), POINTER_PLUS_EXPR,
5713 TREE_TYPE (t), t, t2);
5714 t = build2 (MEM_REF, type, t, build_int_cst (ptype, 0));
5715 }
5716 OMP_CLAUSE_DECL (c) = t;
5717 return false;
5718 }
5719 OMP_CLAUSE_DECL (c) = first;
5720 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
5721 return false;
5722 OMP_CLAUSE_SIZE (c) = size;
5723 if (TREE_CODE (t) == FIELD_DECL)
5724 t = finish_non_static_data_member (t, NULL_TREE, NULL_TREE);
5725 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
5726 || (TREE_CODE (t) == COMPONENT_REF
5727 && TREE_CODE (TREE_TYPE (t)) == ARRAY_TYPE))
5728 return false;
5729 switch (OMP_CLAUSE_MAP_KIND (c))
5730 {
5731 case GOMP_MAP_ALLOC:
5732 case GOMP_MAP_IF_PRESENT:
5733 case GOMP_MAP_TO:
5734 case GOMP_MAP_FROM:
5735 case GOMP_MAP_TOFROM:
5736 case GOMP_MAP_ALWAYS_TO:
5737 case GOMP_MAP_ALWAYS_FROM:
5738 case GOMP_MAP_ALWAYS_TOFROM:
5739 case GOMP_MAP_RELEASE:
5740 case GOMP_MAP_DELETE:
5741 case GOMP_MAP_FORCE_TO:
5742 case GOMP_MAP_FORCE_FROM:
5743 case GOMP_MAP_FORCE_TOFROM:
5744 case GOMP_MAP_FORCE_PRESENT:
5745 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c) = 1;
5746 break;
5747 default:
5748 break;
5749 }
5750 bool reference_always_pointer = true;
5751 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
5752 OMP_CLAUSE_MAP);
5753 if (TREE_CODE (t) == COMPONENT_REF)
5754 {
5755 OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_ATTACH_DETACH);
5756
5757 if ((ort & C_ORT_OMP_DECLARE_SIMD) == C_ORT_OMP
5758 && TYPE_REF_P (TREE_TYPE (t)))
5759 {
5760 if (TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == ARRAY_TYPE)
5761 OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_ALWAYS_POINTER);
5762 else
5763 t = convert_from_reference (t);
5764
5765 reference_always_pointer = false;
5766 }
5767 }
5768 else if (REFERENCE_REF_P (t)
5769 && TREE_CODE (TREE_OPERAND (t, 0)) == COMPONENT_REF)
5770 {
5771 gomp_map_kind k;
5772 if ((ort & C_ORT_OMP_DECLARE_SIMD) == C_ORT_OMP
5773 && TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE)
5774 k = GOMP_MAP_ATTACH_DETACH;
5775 else
5776 {
5777 t = TREE_OPERAND (t, 0);
5778 k = (ort == C_ORT_ACC
5779 ? GOMP_MAP_ATTACH_DETACH : GOMP_MAP_ALWAYS_POINTER);
5780 }
5781 OMP_CLAUSE_SET_MAP_KIND (c2, k);
5782 }
5783 else
5784 OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_FIRSTPRIVATE_POINTER);
5785 OMP_CLAUSE_MAP_IMPLICIT (c2) = OMP_CLAUSE_MAP_IMPLICIT (c);
5786 if (OMP_CLAUSE_MAP_KIND (c2) != GOMP_MAP_FIRSTPRIVATE_POINTER
5787 && !cxx_mark_addressable (t))
5788 return false;
5789 OMP_CLAUSE_DECL (c2) = t;
5790 t = build_fold_addr_expr (first);
5791 t = fold_convert_loc (OMP_CLAUSE_LOCATION (c),
5792 ptrdiff_type_node, t);
5793 tree ptr = OMP_CLAUSE_DECL (c2);
5794 ptr = convert_from_reference (ptr);
5795 if (!INDIRECT_TYPE_P (TREE_TYPE (ptr)))
5796 ptr = build_fold_addr_expr (ptr);
5797 t = fold_build2_loc (OMP_CLAUSE_LOCATION (c), MINUS_EXPR,
5798 ptrdiff_type_node, t,
5799 fold_convert_loc (OMP_CLAUSE_LOCATION (c),
5800 ptrdiff_type_node, ptr));
5801 OMP_CLAUSE_SIZE (c2) = t;
5802 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (c);
5803 OMP_CLAUSE_CHAIN (c) = c2;
5804
5805 ptr = OMP_CLAUSE_DECL (c2);
5806 if (reference_always_pointer
5807 && OMP_CLAUSE_MAP_KIND (c2) != GOMP_MAP_FIRSTPRIVATE_POINTER
5808 && TYPE_REF_P (TREE_TYPE (ptr))
5809 && INDIRECT_TYPE_P (TREE_TYPE (TREE_TYPE (ptr))))
5810 {
5811 tree c3 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
5812 OMP_CLAUSE_MAP);
5813 OMP_CLAUSE_SET_MAP_KIND (c3, OMP_CLAUSE_MAP_KIND (c2));
5814 OMP_CLAUSE_MAP_IMPLICIT (c2) = OMP_CLAUSE_MAP_IMPLICIT (c);
5815 OMP_CLAUSE_DECL (c3) = ptr;
5816 if (OMP_CLAUSE_MAP_KIND (c2) == GOMP_MAP_ALWAYS_POINTER
5817 || OMP_CLAUSE_MAP_KIND (c2) == GOMP_MAP_ATTACH_DETACH)
5818 {
5819 OMP_CLAUSE_DECL (c2) = build_simple_mem_ref (ptr);
5820 OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_ALWAYS_POINTER);
5821 }
5822 else
5823 OMP_CLAUSE_DECL (c2) = convert_from_reference (ptr);
5824 OMP_CLAUSE_SIZE (c3) = size_zero_node;
5825 OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2);
5826 OMP_CLAUSE_CHAIN (c2) = c3;
5827 }
5828 }
5829 }
5830 return false;
5831 }
5832
5833 /* Return identifier to look up for omp declare reduction. */
5834
5835 tree
5836 omp_reduction_id (enum tree_code reduction_code, tree reduction_id, tree type)
5837 {
5838 const char *p = NULL;
5839 const char *m = NULL;
5840 switch (reduction_code)
5841 {
5842 case PLUS_EXPR:
5843 case MULT_EXPR:
5844 case MINUS_EXPR:
5845 case BIT_AND_EXPR:
5846 case BIT_XOR_EXPR:
5847 case BIT_IOR_EXPR:
5848 case TRUTH_ANDIF_EXPR:
5849 case TRUTH_ORIF_EXPR:
5850 reduction_id = ovl_op_identifier (false, reduction_code);
5851 break;
5852 case MIN_EXPR:
5853 p = "min";
5854 break;
5855 case MAX_EXPR:
5856 p = "max";
5857 break;
5858 default:
5859 break;
5860 }
5861
5862 if (p == NULL)
5863 {
5864 if (TREE_CODE (reduction_id) != IDENTIFIER_NODE)
5865 return error_mark_node;
5866 p = IDENTIFIER_POINTER (reduction_id);
5867 }
5868
5869 if (type != NULL_TREE)
5870 m = mangle_type_string (TYPE_MAIN_VARIANT (type));
5871
5872 const char prefix[] = "omp declare reduction ";
5873 size_t lenp = sizeof (prefix);
5874 if (strncmp (p, prefix, lenp - 1) == 0)
5875 lenp = 1;
5876 size_t len = strlen (p);
5877 size_t lenm = m ? strlen (m) + 1 : 0;
5878 char *name = XALLOCAVEC (char, lenp + len + lenm);
5879 if (lenp > 1)
5880 memcpy (name, prefix, lenp - 1);
5881 memcpy (name + lenp - 1, p, len + 1);
5882 if (m)
5883 {
5884 name[lenp + len - 1] = '~';
5885 memcpy (name + lenp + len, m, lenm);
5886 }
5887 return get_identifier (name);
5888 }
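/* Editor's note -- examples of the identifiers built above (the
   declaration is hypothetical): for

     #pragma omp declare reduction (foo : long : omp_out += omp_in)

   the lookup name is "omp declare reduction foo"; for the predefined
   operators the overloaded-operator identifier is used instead of a plain
   name.  When TYPE is supplied, a '~' and the mangled type string are
   appended, giving names of the form
   "omp declare reduction foo~<mangled type>".  */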
5889
5890 /* Lookup OpenMP UDR ID for TYPE, return the corresponding artificial
5891 FUNCTION_DECL or NULL_TREE if not found. */
5892
5893 static tree
5894 omp_reduction_lookup (location_t loc, tree id, tree type, tree *baselinkp,
5895 vec<tree> *ambiguousp)
5896 {
5897 tree orig_id = id;
5898 tree baselink = NULL_TREE;
5899 if (identifier_p (id))
5900 {
5901 cp_id_kind idk;
5902 bool nonint_cst_expression_p;
5903 const char *error_msg;
5904 id = omp_reduction_id (ERROR_MARK, id, type);
5905 tree decl = lookup_name (id);
5906 if (decl == NULL_TREE)
5907 decl = error_mark_node;
5908 id = finish_id_expression (id, decl, NULL_TREE, &idk, false, true,
5909 &nonint_cst_expression_p, false, true, false,
5910 false, &error_msg, loc);
5911 if (idk == CP_ID_KIND_UNQUALIFIED
5912 && identifier_p (id))
5913 {
5914 vec<tree, va_gc> *args = NULL;
5915 vec_safe_push (args, build_reference_type (type));
5916 id = perform_koenig_lookup (id, args, tf_none);
5917 }
5918 }
5919 else if (TREE_CODE (id) == SCOPE_REF)
5920 id = lookup_qualified_name (TREE_OPERAND (id, 0),
5921 omp_reduction_id (ERROR_MARK,
5922 TREE_OPERAND (id, 1),
5923 type),
5924 LOOK_want::NORMAL, false);
5925 tree fns = id;
5926 id = NULL_TREE;
5927 if (fns && is_overloaded_fn (fns))
5928 {
5929 for (lkp_iterator iter (get_fns (fns)); iter; ++iter)
5930 {
5931 tree fndecl = *iter;
5932 if (TREE_CODE (fndecl) == FUNCTION_DECL)
5933 {
5934 tree argtype = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)));
5935 if (same_type_p (TREE_TYPE (argtype), type))
5936 {
5937 id = fndecl;
5938 break;
5939 }
5940 }
5941 }
5942
5943 if (id && BASELINK_P (fns))
5944 {
5945 if (baselinkp)
5946 *baselinkp = fns;
5947 else
5948 baselink = fns;
5949 }
5950 }
5951
5952 if (!id && CLASS_TYPE_P (type) && TYPE_BINFO (type))
5953 {
5954 auto_vec<tree> ambiguous;
5955 tree binfo = TYPE_BINFO (type), base_binfo, ret = NULL_TREE;
5956 unsigned int ix;
5957 if (ambiguousp == NULL)
5958 ambiguousp = &ambiguous;
5959 for (ix = 0; BINFO_BASE_ITERATE (binfo, ix, base_binfo); ix++)
5960 {
5961 id = omp_reduction_lookup (loc, orig_id, BINFO_TYPE (base_binfo),
5962 baselinkp ? baselinkp : &baselink,
5963 ambiguousp);
5964 if (id == NULL_TREE)
5965 continue;
5966 if (!ambiguousp->is_empty ())
5967 ambiguousp->safe_push (id);
5968 else if (ret != NULL_TREE)
5969 {
5970 ambiguousp->safe_push (ret);
5971 ambiguousp->safe_push (id);
5972 ret = NULL_TREE;
5973 }
5974 else
5975 ret = id;
5976 }
5977 if (ambiguousp != &ambiguous)
5978 return ret;
5979 if (!ambiguous.is_empty ())
5980 {
5981 const char *str = _("candidates are:");
5982 unsigned int idx;
5983 tree udr;
5984 error_at (loc, "user defined reduction lookup is ambiguous");
5985 FOR_EACH_VEC_ELT (ambiguous, idx, udr)
5986 {
5987 inform (DECL_SOURCE_LOCATION (udr), "%s %#qD", str, udr);
5988 if (idx == 0)
5989 str = get_spaces (str);
5990 }
5991 ret = error_mark_node;
5992 baselink = NULL_TREE;
5993 }
5994 id = ret;
5995 }
5996 if (id && baselink)
5997 perform_or_defer_access_check (BASELINK_BINFO (baselink),
5998 id, id, tf_warning_or_error);
5999 return id;
6000 }
6001
6002 /* Helper function for cp_parser_omp_declare_reduction_exprs
6003 and tsubst_omp_udr.
6004 Remove CLEANUP_STMT for data (omp_priv variable).
6005 Also append INIT_EXPR for DECL_INITIAL of omp_priv after its
6006 DECL_EXPR. */
6007
6008 tree
6009 cp_remove_omp_priv_cleanup_stmt (tree *tp, int *walk_subtrees, void *data)
6010 {
6011 if (TYPE_P (*tp))
6012 *walk_subtrees = 0;
6013 else if (TREE_CODE (*tp) == CLEANUP_STMT && CLEANUP_DECL (*tp) == (tree) data)
6014 *tp = CLEANUP_BODY (*tp);
6015 else if (TREE_CODE (*tp) == DECL_EXPR)
6016 {
6017 tree decl = DECL_EXPR_DECL (*tp);
6018 if (!processing_template_decl
6019 && decl == (tree) data
6020 && DECL_INITIAL (decl)
6021 && DECL_INITIAL (decl) != error_mark_node)
6022 {
6023 tree list = NULL_TREE;
6024 append_to_statement_list_force (*tp, &list);
6025 tree init_expr = build2 (INIT_EXPR, void_type_node,
6026 decl, DECL_INITIAL (decl));
6027 DECL_INITIAL (decl) = NULL_TREE;
6028 append_to_statement_list_force (init_expr, &list);
6029 *tp = list;
6030 }
6031 }
6032 return NULL_TREE;
6033 }
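/* Editor's note -- hedged illustration: for a user-defined reduction
   declared with

     initializer (omp_priv = omp_orig)

   the parsed initializer contains a DECL_EXPR for omp_priv, possibly
   wrapped in a CLEANUP_STMT; the walk above drops the cleanup and splits
   the initialization out into an explicit INIT_EXPR appended right after
   the DECL_EXPR, clearing DECL_INITIAL in the process.  */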
6034
6035 /* Data passed from cp_check_omp_declare_reduction to
6036 cp_check_omp_declare_reduction_r. */
6037
6038 struct cp_check_omp_declare_reduction_data
6039 {
6040 location_t loc;
6041 tree stmts[7];
6042 bool combiner_p;
6043 };
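/* Editor's note (inferred from the checks below and from
   finish_omp_reduction_clause): stmts[0] and stmts[1] are the DECL_EXPRs
   of omp_out and omp_in and stmts[2] is the combiner; stmts[3] and
   stmts[4] are the DECL_EXPRs of omp_priv and omp_orig, stmts[5] is the
   initializer, and stmts[6], when present, is a further DECL_EXPR.  */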
6044
6045 /* Helper function for cp_check_omp_declare_reduction, called via
6046 cp_walk_tree. */
6047
6048 static tree
6049 cp_check_omp_declare_reduction_r (tree *tp, int *, void *data)
6050 {
6051 struct cp_check_omp_declare_reduction_data *udr_data
6052 = (struct cp_check_omp_declare_reduction_data *) data;
6053 if (SSA_VAR_P (*tp)
6054 && !DECL_ARTIFICIAL (*tp)
6055 && *tp != DECL_EXPR_DECL (udr_data->stmts[udr_data->combiner_p ? 0 : 3])
6056 && *tp != DECL_EXPR_DECL (udr_data->stmts[udr_data->combiner_p ? 1 : 4]))
6057 {
6058 location_t loc = udr_data->loc;
6059 if (udr_data->combiner_p)
6060 error_at (loc, "%<#pragma omp declare reduction%> combiner refers to "
6061 "variable %qD which is not %<omp_out%> nor %<omp_in%>",
6062 *tp);
6063 else
6064 error_at (loc, "%<#pragma omp declare reduction%> initializer refers "
6065 "to variable %qD which is not %<omp_priv%> nor "
6066 "%<omp_orig%>",
6067 *tp);
6068 return *tp;
6069 }
6070 return NULL_TREE;
6071 }
6072
6073 /* Diagnose violation of OpenMP #pragma omp declare reduction restrictions. */
6074
6075 bool
6076 cp_check_omp_declare_reduction (tree udr)
6077 {
6078 tree type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (udr)));
6079 gcc_assert (TYPE_REF_P (type));
6080 type = TREE_TYPE (type);
6081 int i;
6082 location_t loc = DECL_SOURCE_LOCATION (udr);
6083
6084 if (type == error_mark_node)
6085 return false;
6086 if (ARITHMETIC_TYPE_P (type))
6087 {
6088 static enum tree_code predef_codes[]
6089 = { PLUS_EXPR, MULT_EXPR, MINUS_EXPR, BIT_AND_EXPR, BIT_XOR_EXPR,
6090 BIT_IOR_EXPR, TRUTH_ANDIF_EXPR, TRUTH_ORIF_EXPR };
6091 for (i = 0; i < 8; i++)
6092 {
6093 tree id = omp_reduction_id (predef_codes[i], NULL_TREE, NULL_TREE);
6094 const char *n1 = IDENTIFIER_POINTER (DECL_NAME (udr));
6095 const char *n2 = IDENTIFIER_POINTER (id);
6096 if (strncmp (n1, n2, IDENTIFIER_LENGTH (id)) == 0
6097 && (n1[IDENTIFIER_LENGTH (id)] == '~'
6098 || n1[IDENTIFIER_LENGTH (id)] == '\0'))
6099 break;
6100 }
6101
6102 if (i == 8
6103 && TREE_CODE (type) != COMPLEX_EXPR)
6104 {
6105 const char prefix_minmax[] = "omp declare reduction m";
6106 size_t prefix_size = sizeof (prefix_minmax) - 1;
6107 const char *n = IDENTIFIER_POINTER (DECL_NAME (udr));
6108 if (strncmp (IDENTIFIER_POINTER (DECL_NAME (udr)),
6109 prefix_minmax, prefix_size) == 0
6110 && ((n[prefix_size] == 'i' && n[prefix_size + 1] == 'n')
6111 || (n[prefix_size] == 'a' && n[prefix_size + 1] == 'x'))
6112 && (n[prefix_size + 2] == '~' || n[prefix_size + 2] == '\0'))
6113 i = 0;
6114 }
6115 if (i < 8)
6116 {
6117 error_at (loc, "predeclared arithmetic type %qT in "
6118 "%<#pragma omp declare reduction%>", type);
6119 return false;
6120 }
6121 }
6122 else if (FUNC_OR_METHOD_TYPE_P (type)
6123 || TREE_CODE (type) == ARRAY_TYPE)
6124 {
6125 error_at (loc, "function or array type %qT in "
6126 "%<#pragma omp declare reduction%>", type);
6127 return false;
6128 }
6129 else if (TYPE_REF_P (type))
6130 {
6131 error_at (loc, "reference type %qT in %<#pragma omp declare reduction%>",
6132 type);
6133 return false;
6134 }
6135 else if (TYPE_QUALS_NO_ADDR_SPACE (type))
6136 {
6137 error_at (loc, "%<const%>, %<volatile%> or %<__restrict%>-qualified "
6138 "type %qT in %<#pragma omp declare reduction%>", type);
6139 return false;
6140 }
6141
6142 tree body = DECL_SAVED_TREE (udr);
6143 if (body == NULL_TREE || TREE_CODE (body) != STATEMENT_LIST)
6144 return true;
6145
6146 tree_stmt_iterator tsi;
6147 struct cp_check_omp_declare_reduction_data data;
6148 memset (data.stmts, 0, sizeof data.stmts);
6149 for (i = 0, tsi = tsi_start (body);
6150 i < 7 && !tsi_end_p (tsi);
6151 i++, tsi_next (&tsi))
6152 data.stmts[i] = tsi_stmt (tsi);
6153 data.loc = loc;
6154 gcc_assert (tsi_end_p (tsi));
6155 if (i >= 3)
6156 {
6157 gcc_assert (TREE_CODE (data.stmts[0]) == DECL_EXPR
6158 && TREE_CODE (data.stmts[1]) == DECL_EXPR);
6159 if (warning_suppressed_p (DECL_EXPR_DECL (data.stmts[0]) /* What warning? */))
6160 return true;
6161 data.combiner_p = true;
6162 if (cp_walk_tree (&data.stmts[2], cp_check_omp_declare_reduction_r,
6163 &data, NULL))
6164 suppress_warning (DECL_EXPR_DECL (data.stmts[0]) /* What warning? */);
6165 }
6166 if (i >= 6)
6167 {
6168 gcc_assert (TREE_CODE (data.stmts[3]) == DECL_EXPR
6169 && TREE_CODE (data.stmts[4]) == DECL_EXPR);
6170 data.combiner_p = false;
6171 if (cp_walk_tree (&data.stmts[5], cp_check_omp_declare_reduction_r,
6172 &data, NULL)
6173 || cp_walk_tree (&DECL_INITIAL (DECL_EXPR_DECL (data.stmts[3])),
6174 cp_check_omp_declare_reduction_r, &data, NULL))
6175 	suppress_warning (DECL_EXPR_DECL (data.stmts[0]) /* What warning? */);
6176 if (i == 7)
6177 gcc_assert (TREE_CODE (data.stmts[6]) == DECL_EXPR);
6178 }
6179 return true;
6180 }
6181
6182 /* Helper function of finish_omp_clauses. Clone STMT as if we were making
6183 an inline call. But, remap
6184 the OMP_DECL1 VAR_DECL (omp_out resp. omp_orig) to PLACEHOLDER
6185 and OMP_DECL2 VAR_DECL (omp_in resp. omp_priv) to DECL. */
6186
6187 static tree
6188 clone_omp_udr (tree stmt, tree omp_decl1, tree omp_decl2,
6189 tree decl, tree placeholder)
6190 {
6191 copy_body_data id;
6192 hash_map<tree, tree> decl_map;
6193
6194 decl_map.put (omp_decl1, placeholder);
6195 decl_map.put (omp_decl2, decl);
6196 memset (&id, 0, sizeof (id));
6197 id.src_fn = DECL_CONTEXT (omp_decl1);
6198 id.dst_fn = current_function_decl;
6199 id.src_cfun = DECL_STRUCT_FUNCTION (id.src_fn);
6200 id.decl_map = &decl_map;
6201
6202 id.copy_decl = copy_decl_no_change;
6203 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
6204 id.transform_new_cfg = true;
6205 id.transform_return_to_modify = false;
6206 id.eh_lp_nr = 0;
6207 walk_tree (&stmt, copy_tree_body_r, &id, NULL);
6208 return stmt;
6209 }
6210
6211 /* Helper function of finish_omp_clauses, called via cp_walk_tree.
6212 Find OMP_CLAUSE_PLACEHOLDER (passed in DATA) in *TP. */
6213
6214 static tree
6215 find_omp_placeholder_r (tree *tp, int *, void *data)
6216 {
6217 if (*tp == (tree) data)
6218 return *tp;
6219 return NULL_TREE;
6220 }
6221
6222 /* Helper function of finish_omp_clauses. Handle OMP_CLAUSE_REDUCTION C.
6223 Return true if there is some error and the clause should be removed. */
6224
6225 static bool
6226 finish_omp_reduction_clause (tree c, bool *need_default_ctor, bool *need_dtor)
6227 {
6228 tree t = OMP_CLAUSE_DECL (c);
6229 bool predefined = false;
6230 if (TREE_CODE (t) == TREE_LIST)
6231 {
6232 gcc_assert (processing_template_decl);
6233 return false;
6234 }
6235 tree type = TREE_TYPE (t);
6236 if (TREE_CODE (t) == MEM_REF)
6237 type = TREE_TYPE (type);
6238 if (TYPE_REF_P (type))
6239 type = TREE_TYPE (type);
6240 if (TREE_CODE (type) == ARRAY_TYPE)
6241 {
6242 tree oatype = type;
6243 gcc_assert (TREE_CODE (t) != MEM_REF);
6244 while (TREE_CODE (type) == ARRAY_TYPE)
6245 type = TREE_TYPE (type);
6246 if (!processing_template_decl)
6247 {
6248 t = require_complete_type (t);
6249 if (t == error_mark_node
6250 || !complete_type_or_else (oatype, NULL_TREE))
6251 return true;
6252 tree size = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (oatype),
6253 TYPE_SIZE_UNIT (type));
6254 if (integer_zerop (size))
6255 {
6256 error_at (OMP_CLAUSE_LOCATION (c),
6257 "%qE in %<reduction%> clause is a zero size array",
6258 omp_clause_printable_decl (t));
6259 return true;
6260 }
6261 size = size_binop (MINUS_EXPR, size, size_one_node);
6262 size = save_expr (size);
6263 tree index_type = build_index_type (size);
6264 tree atype = build_array_type (type, index_type);
6265 tree ptype = build_pointer_type (type);
6266 if (TREE_CODE (TREE_TYPE (t)) == ARRAY_TYPE)
6267 t = build_fold_addr_expr (t);
6268 t = build2 (MEM_REF, atype, t, build_int_cst (ptype, 0));
6269 OMP_CLAUSE_DECL (c) = t;
6270 }
6271 }
6272 if (type == error_mark_node)
6273 return true;
6274 else if (ARITHMETIC_TYPE_P (type))
6275 switch (OMP_CLAUSE_REDUCTION_CODE (c))
6276 {
6277 case PLUS_EXPR:
6278 case MULT_EXPR:
6279 case MINUS_EXPR:
6280 case TRUTH_ANDIF_EXPR:
6281 case TRUTH_ORIF_EXPR:
6282 predefined = true;
6283 break;
6284 case MIN_EXPR:
6285 case MAX_EXPR:
6286 if (TREE_CODE (type) == COMPLEX_TYPE)
6287 break;
6288 predefined = true;
6289 break;
6290 case BIT_AND_EXPR:
6291 case BIT_IOR_EXPR:
6292 case BIT_XOR_EXPR:
6293 if (FLOAT_TYPE_P (type) || TREE_CODE (type) == COMPLEX_TYPE)
6294 break;
6295 predefined = true;
6296 break;
6297 default:
6298 break;
6299 }
6300 else if (TYPE_READONLY (type))
6301 {
6302 error_at (OMP_CLAUSE_LOCATION (c),
6303 "%qE has const type for %<reduction%>",
6304 omp_clause_printable_decl (t));
6305 return true;
6306 }
6307 else if (!processing_template_decl)
6308 {
6309 t = require_complete_type (t);
6310 if (t == error_mark_node)
6311 return true;
6312 OMP_CLAUSE_DECL (c) = t;
6313 }
6314
6315 if (predefined)
6316 {
6317 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL_TREE;
6318 return false;
6319 }
6320 else if (processing_template_decl)
6321 {
6322 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == error_mark_node)
6323 return true;
6324 return false;
6325 }
6326
6327 tree id = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
6328
6329 type = TYPE_MAIN_VARIANT (type);
6330 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL_TREE;
6331 if (id == NULL_TREE)
6332 id = omp_reduction_id (OMP_CLAUSE_REDUCTION_CODE (c),
6333 NULL_TREE, NULL_TREE);
6334 id = omp_reduction_lookup (OMP_CLAUSE_LOCATION (c), id, type, NULL, NULL);
6335 if (id)
6336 {
6337 if (id == error_mark_node)
6338 return true;
6339 mark_used (id);
6340 tree body = DECL_SAVED_TREE (id);
6341 if (!body)
6342 return true;
6343 if (TREE_CODE (body) == STATEMENT_LIST)
6344 {
6345 tree_stmt_iterator tsi;
6346 tree placeholder = NULL_TREE, decl_placeholder = NULL_TREE;
6347 int i;
6348 tree stmts[7];
6349 tree atype = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (id)));
6350 atype = TREE_TYPE (atype);
6351 bool need_static_cast = !same_type_p (type, atype);
6352 memset (stmts, 0, sizeof stmts);
6353 for (i = 0, tsi = tsi_start (body);
6354 i < 7 && !tsi_end_p (tsi);
6355 i++, tsi_next (&tsi))
6356 stmts[i] = tsi_stmt (tsi);
6357 gcc_assert (tsi_end_p (tsi));
6358
6359 if (i >= 3)
6360 {
6361 gcc_assert (TREE_CODE (stmts[0]) == DECL_EXPR
6362 && TREE_CODE (stmts[1]) == DECL_EXPR);
6363 placeholder = build_lang_decl (VAR_DECL, NULL_TREE, type);
6364 DECL_ARTIFICIAL (placeholder) = 1;
6365 DECL_IGNORED_P (placeholder) = 1;
6366 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = placeholder;
6367 if (TREE_CODE (t) == MEM_REF)
6368 {
6369 decl_placeholder = build_lang_decl (VAR_DECL, NULL_TREE,
6370 type);
6371 DECL_ARTIFICIAL (decl_placeholder) = 1;
6372 DECL_IGNORED_P (decl_placeholder) = 1;
6373 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = decl_placeholder;
6374 }
6375 if (TREE_ADDRESSABLE (DECL_EXPR_DECL (stmts[0])))
6376 cxx_mark_addressable (placeholder);
6377 if (TREE_ADDRESSABLE (DECL_EXPR_DECL (stmts[1]))
6378 && (decl_placeholder
6379 || !TYPE_REF_P (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
6380 cxx_mark_addressable (decl_placeholder ? decl_placeholder
6381 : OMP_CLAUSE_DECL (c));
6382 tree omp_out = placeholder;
6383 tree omp_in = decl_placeholder ? decl_placeholder
6384 : convert_from_reference (OMP_CLAUSE_DECL (c));
6385 if (need_static_cast)
6386 {
6387 tree rtype = build_reference_type (atype);
6388 omp_out = build_static_cast (input_location,
6389 rtype, omp_out,
6390 tf_warning_or_error);
6391 omp_in = build_static_cast (input_location,
6392 rtype, omp_in,
6393 tf_warning_or_error);
6394 if (omp_out == error_mark_node || omp_in == error_mark_node)
6395 return true;
6396 omp_out = convert_from_reference (omp_out);
6397 omp_in = convert_from_reference (omp_in);
6398 }
6399 OMP_CLAUSE_REDUCTION_MERGE (c)
6400 = clone_omp_udr (stmts[2], DECL_EXPR_DECL (stmts[0]),
6401 DECL_EXPR_DECL (stmts[1]), omp_in, omp_out);
6402 }
6403 if (i >= 6)
6404 {
6405 gcc_assert (TREE_CODE (stmts[3]) == DECL_EXPR
6406 && TREE_CODE (stmts[4]) == DECL_EXPR);
6407 if (TREE_ADDRESSABLE (DECL_EXPR_DECL (stmts[3]))
6408 && (decl_placeholder
6409 || !TYPE_REF_P (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
6410 cxx_mark_addressable (decl_placeholder ? decl_placeholder
6411 : OMP_CLAUSE_DECL (c));
6412 if (TREE_ADDRESSABLE (DECL_EXPR_DECL (stmts[4])))
6413 cxx_mark_addressable (placeholder);
6414 tree omp_priv = decl_placeholder ? decl_placeholder
6415 : convert_from_reference (OMP_CLAUSE_DECL (c));
6416 tree omp_orig = placeholder;
6417 if (need_static_cast)
6418 {
6419 if (i == 7)
6420 {
6421 error_at (OMP_CLAUSE_LOCATION (c),
6422 "user defined reduction with constructor "
6423 "initializer for base class %qT", atype);
6424 return true;
6425 }
6426 tree rtype = build_reference_type (atype);
6427 omp_priv = build_static_cast (input_location,
6428 rtype, omp_priv,
6429 tf_warning_or_error);
6430 omp_orig = build_static_cast (input_location,
6431 rtype, omp_orig,
6432 tf_warning_or_error);
6433 if (omp_priv == error_mark_node
6434 || omp_orig == error_mark_node)
6435 return true;
6436 omp_priv = convert_from_reference (omp_priv);
6437 omp_orig = convert_from_reference (omp_orig);
6438 }
6439 if (i == 6)
6440 *need_default_ctor = true;
6441 OMP_CLAUSE_REDUCTION_INIT (c)
6442 = clone_omp_udr (stmts[5], DECL_EXPR_DECL (stmts[4]),
6443 DECL_EXPR_DECL (stmts[3]),
6444 omp_priv, omp_orig);
6445 if (cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
6446 find_omp_placeholder_r, placeholder, NULL))
6447 OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c) = 1;
6448 }
6449 else if (i >= 3)
6450 {
6451 if (CLASS_TYPE_P (type) && !pod_type_p (type))
6452 *need_default_ctor = true;
6453 else
6454 {
6455 tree init;
6456 tree v = decl_placeholder ? decl_placeholder
6457 : convert_from_reference (t);
6458 if (AGGREGATE_TYPE_P (TREE_TYPE (v)))
6459 init = build_constructor (TREE_TYPE (v), NULL);
6460 else
6461 init = fold_convert (TREE_TYPE (v), integer_zero_node);
6462 OMP_CLAUSE_REDUCTION_INIT (c)
6463 = build2 (INIT_EXPR, TREE_TYPE (v), v, init);
6464 }
6465 }
6466 }
6467 }
6468 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6469 *need_dtor = true;
6470 else
6471 {
6472 error_at (OMP_CLAUSE_LOCATION (c),
6473 "user defined reduction not found for %qE",
6474 omp_clause_printable_decl (t));
6475 return true;
6476 }
6477 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
6478 gcc_assert (TYPE_SIZE_UNIT (type)
6479 && TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST);
6480 return false;
6481 }
6482
6483 /* Called from finish_struct_1. linear(this) or linear(this:step)
6484    clauses might not be finalized yet because the class was still incomplete
6485 when parsing #pragma omp declare simd methods. Fix those up now. */
6486
6487 void
6488 finish_omp_declare_simd_methods (tree t)
6489 {
6490 if (processing_template_decl)
6491 return;
6492
6493 for (tree x = TYPE_FIELDS (t); x; x = DECL_CHAIN (x))
6494 {
6495 if (TREE_CODE (x) == USING_DECL
6496 || !DECL_NONSTATIC_MEMBER_FUNCTION_P (x))
6497 continue;
6498 tree ods = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (x));
6499 if (!ods || !TREE_VALUE (ods))
6500 continue;
6501 for (tree c = TREE_VALUE (TREE_VALUE (ods)); c; c = OMP_CLAUSE_CHAIN (c))
6502 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6503 && integer_zerop (OMP_CLAUSE_DECL (c))
6504 && OMP_CLAUSE_LINEAR_STEP (c)
6505 && TYPE_PTR_P (TREE_TYPE (OMP_CLAUSE_LINEAR_STEP (c))))
6506 {
6507 tree s = OMP_CLAUSE_LINEAR_STEP (c);
6508 s = fold_convert_loc (OMP_CLAUSE_LOCATION (c), sizetype, s);
6509 s = fold_build2_loc (OMP_CLAUSE_LOCATION (c), MULT_EXPR,
6510 sizetype, s, TYPE_SIZE_UNIT (t));
6511 OMP_CLAUSE_LINEAR_STEP (c) = s;
6512 }
6513 }
6514 }
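/* Editor's sketch of the situation handled above (hypothetical example):

     struct T
     {
     #pragma omp declare simd linear (this)
       void inc ();
       int val;
     };

   While the pragma is parsed, T is still incomplete, so the recorded
   linear step cannot yet be scaled by the object size; once the class is
   complete, the loop above converts the step to sizetype and multiplies
   it by TYPE_SIZE_UNIT of the class.  */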
6515
6516 /* Adjust sink depend clause to take into account pointer offsets.
6517
6518 Return TRUE if there was a problem processing the offset, and the
6519 whole clause should be removed. */
6520
6521 static bool
6522 cp_finish_omp_clause_depend_sink (tree sink_clause)
6523 {
6524 tree t = OMP_CLAUSE_DECL (sink_clause);
6525 gcc_assert (TREE_CODE (t) == TREE_LIST);
6526
6527 /* Make sure we don't adjust things twice for templates. */
6528 if (processing_template_decl)
6529 return false;
6530
6531 for (; t; t = TREE_CHAIN (t))
6532 {
6533 tree decl = TREE_VALUE (t);
6534 if (TYPE_PTR_P (TREE_TYPE (decl)))
6535 {
6536 tree offset = TREE_PURPOSE (t);
6537 bool neg = wi::neg_p (wi::to_wide (offset));
6538 offset = fold_unary (ABS_EXPR, TREE_TYPE (offset), offset);
6539 decl = mark_rvalue_use (decl);
6540 decl = convert_from_reference (decl);
6541 tree t2 = pointer_int_sum (OMP_CLAUSE_LOCATION (sink_clause),
6542 neg ? MINUS_EXPR : PLUS_EXPR,
6543 decl, offset);
6544 t2 = fold_build2_loc (OMP_CLAUSE_LOCATION (sink_clause),
6545 MINUS_EXPR, sizetype,
6546 fold_convert (sizetype, t2),
6547 fold_convert (sizetype, decl));
6548 if (t2 == error_mark_node)
6549 return true;
6550 TREE_PURPOSE (t) = t2;
6551 }
6552 }
6553 return false;
6554 }
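/* Editor's note -- hedged illustration: for a doacross loop over a
   pointer, e.g.

     #pragma omp for ordered(1)
     for (int *p = a; p < a + n; p++)
       {
       #pragma omp ordered depend(sink: p - 1)
	 ...
       }

   the element offset stored in TREE_PURPOSE is replaced above by the byte
   difference between `p - 1' and `p', computed via pointer_int_sum.  */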
6555
6556 /* Finish OpenMP iterators ITER. Return true if they are erroneous
6557 and clauses containing them should be removed. */
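/* Editor's sketch (hypothetical example): OpenMP iterators appear in
   clauses such as

     #pragma omp task depend(iterator(int i = 0 : n), in : a[i])

   Each iterator is a TREE_VEC holding the variable, begin, end and step
   expressions (plus, outside of templates, the original step in element
   4); the function below type-checks them and turns pointer steps into
   byte offsets.  */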
6558
6559 static bool
6560 cp_omp_finish_iterators (tree iter)
6561 {
6562 bool ret = false;
6563 for (tree it = iter; it; it = TREE_CHAIN (it))
6564 {
6565 tree var = TREE_VEC_ELT (it, 0);
6566 tree begin = TREE_VEC_ELT (it, 1);
6567 tree end = TREE_VEC_ELT (it, 2);
6568 tree step = TREE_VEC_ELT (it, 3);
6569 tree orig_step;
6570 tree type = TREE_TYPE (var);
6571 location_t loc = DECL_SOURCE_LOCATION (var);
6572 if (type == error_mark_node)
6573 {
6574 ret = true;
6575 continue;
6576 }
6577 if (type_dependent_expression_p (var))
6578 continue;
6579 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
6580 {
6581 error_at (loc, "iterator %qD has neither integral nor pointer type",
6582 var);
6583 ret = true;
6584 continue;
6585 }
6586 else if (TYPE_READONLY (type))
6587 {
6588 error_at (loc, "iterator %qD has const qualified type", var);
6589 ret = true;
6590 continue;
6591 }
6592 if (type_dependent_expression_p (begin)
6593 || type_dependent_expression_p (end)
6594 || type_dependent_expression_p (step))
6595 continue;
6596 else if (error_operand_p (step))
6597 {
6598 ret = true;
6599 continue;
6600 }
6601 else if (!INTEGRAL_TYPE_P (TREE_TYPE (step)))
6602 {
6603 error_at (EXPR_LOC_OR_LOC (step, loc),
6604 "iterator step with non-integral type");
6605 ret = true;
6606 continue;
6607 }
6608
6609 begin = mark_rvalue_use (begin);
6610 end = mark_rvalue_use (end);
6611 step = mark_rvalue_use (step);
6612 begin = cp_build_c_cast (input_location, type, begin,
6613 tf_warning_or_error);
6614 end = cp_build_c_cast (input_location, type, end,
6615 tf_warning_or_error);
6616 orig_step = step;
6617 if (!processing_template_decl)
6618 step = orig_step = save_expr (step);
6619 tree stype = POINTER_TYPE_P (type) ? sizetype : type;
6620 step = cp_build_c_cast (input_location, stype, step,
6621 tf_warning_or_error);
6622 if (POINTER_TYPE_P (type) && !processing_template_decl)
6623 {
6624 begin = save_expr (begin);
6625 step = pointer_int_sum (loc, PLUS_EXPR, begin, step);
6626 step = fold_build2_loc (loc, MINUS_EXPR, sizetype,
6627 fold_convert (sizetype, step),
6628 fold_convert (sizetype, begin));
6629 step = fold_convert (ssizetype, step);
6630 }
6631 if (!processing_template_decl)
6632 {
6633 begin = maybe_constant_value (begin);
6634 end = maybe_constant_value (end);
6635 step = maybe_constant_value (step);
6636 orig_step = maybe_constant_value (orig_step);
6637 }
6638 if (integer_zerop (step))
6639 {
6640 error_at (loc, "iterator %qD has zero step", var);
6641 ret = true;
6642 continue;
6643 }
6644
6645 if (begin == error_mark_node
6646 || end == error_mark_node
6647 || step == error_mark_node
6648 || orig_step == error_mark_node)
6649 {
6650 ret = true;
6651 continue;
6652 }
6653
6654 if (!processing_template_decl)
6655 {
6656 begin = fold_build_cleanup_point_expr (TREE_TYPE (begin), begin);
6657 end = fold_build_cleanup_point_expr (TREE_TYPE (end), end);
6658 step = fold_build_cleanup_point_expr (TREE_TYPE (step), step);
6659 orig_step = fold_build_cleanup_point_expr (TREE_TYPE (orig_step),
6660 orig_step);
6661 }
6662 hash_set<tree> pset;
6663 tree it2;
6664 for (it2 = TREE_CHAIN (it); it2; it2 = TREE_CHAIN (it2))
6665 {
6666 tree var2 = TREE_VEC_ELT (it2, 0);
6667 tree begin2 = TREE_VEC_ELT (it2, 1);
6668 tree end2 = TREE_VEC_ELT (it2, 2);
6669 tree step2 = TREE_VEC_ELT (it2, 3);
6670 location_t loc2 = DECL_SOURCE_LOCATION (var2);
6671 if (cp_walk_tree (&begin2, find_omp_placeholder_r, var, &pset))
6672 {
6673 error_at (EXPR_LOC_OR_LOC (begin2, loc2),
6674 "begin expression refers to outer iterator %qD", var);
6675 break;
6676 }
6677 else if (cp_walk_tree (&end2, find_omp_placeholder_r, var, &pset))
6678 {
6679 error_at (EXPR_LOC_OR_LOC (end2, loc2),
6680 "end expression refers to outer iterator %qD", var);
6681 break;
6682 }
6683 else if (cp_walk_tree (&step2, find_omp_placeholder_r, var, &pset))
6684 {
6685 error_at (EXPR_LOC_OR_LOC (step2, loc2),
6686 "step expression refers to outer iterator %qD", var);
6687 break;
6688 }
6689 }
6690 if (it2)
6691 {
6692 ret = true;
6693 continue;
6694 }
6695 TREE_VEC_ELT (it, 1) = begin;
6696 TREE_VEC_ELT (it, 2) = end;
6697 if (processing_template_decl)
6698 TREE_VEC_ELT (it, 3) = orig_step;
6699 else
6700 {
6701 TREE_VEC_ELT (it, 3) = step;
6702 TREE_VEC_ELT (it, 4) = orig_step;
6703 }
6704 }
6705 return ret;
6706 }
6707
6708 /* Ensure that pointers are used in OpenACC attach and detach clauses.
6709 Return true if an error has been detected. */
6710
6711 static bool
6712 cp_oacc_check_attachments (tree c)
6713 {
6714 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
6715 return false;
6716
6717 /* OpenACC attach / detach clauses must be pointers. */
6718 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
6719 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
6720 {
6721 tree t = OMP_CLAUSE_DECL (c);
6722 tree type;
6723
6724 while (TREE_CODE (t) == TREE_LIST)
6725 t = TREE_CHAIN (t);
6726
6727 type = TREE_TYPE (t);
6728
6729 if (TREE_CODE (type) == REFERENCE_TYPE)
6730 type = TREE_TYPE (type);
6731
6732 if (TREE_CODE (type) != POINTER_TYPE)
6733 {
6734 error_at (OMP_CLAUSE_LOCATION (c), "expected pointer in %qs clause",
6735 user_omp_clause_code_name (c, true));
6736 return true;
6737 }
6738 }
6739
6740 return false;
6741 }
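/* Editor's note -- hedged illustration of the check above:

     int *p;
     int x;
     #pragma acc enter data attach (p)	// OK, p has pointer type
     #pragma acc enter data attach (x)	// rejected: expected pointer

   References to pointers are looked through, and for array sections the
   TREE_LIST chain is walked down to the underlying declaration before the
   type is inspected.  */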
6742
6743 /* For all elements of CLAUSES, validate them vs OpenMP constraints.
6744 Remove any elements from the list that are invalid. */
6745
6746 tree
6747 finish_omp_clauses (tree clauses, enum c_omp_region_type ort)
6748 {
6749 bitmap_head generic_head, firstprivate_head, lastprivate_head;
6750 bitmap_head aligned_head, map_head, map_field_head, map_firstprivate_head;
6751 bitmap_head oacc_reduction_head, is_on_device_head;
6752 tree c, t, *pc;
6753 tree safelen = NULL_TREE;
6754 bool branch_seen = false;
6755 bool copyprivate_seen = false;
6756 bool ordered_seen = false;
6757 bool order_seen = false;
6758 bool schedule_seen = false;
6759 bool oacc_async = false;
6760 bool indir_component_ref_p = false;
6761 tree last_iterators = NULL_TREE;
6762 bool last_iterators_remove = false;
6763 /* 1 if normal/task reduction has been seen, -1 if inscan reduction
6764 has been seen, -2 if mixed inscan/normal reduction diagnosed. */
6765 int reduction_seen = 0;
6766 bool allocate_seen = false;
6767 tree detach_seen = NULL_TREE;
6768 bool mergeable_seen = false;
6769 bool implicit_moved = false;
6770 bool target_in_reduction_seen = false;
6771
6772 bitmap_obstack_initialize (NULL);
6773 bitmap_initialize (&generic_head, &bitmap_default_obstack);
6774 bitmap_initialize (&firstprivate_head, &bitmap_default_obstack);
6775 bitmap_initialize (&lastprivate_head, &bitmap_default_obstack);
6776 bitmap_initialize (&aligned_head, &bitmap_default_obstack);
6777 /* If ort == C_ORT_OMP_DECLARE_SIMD used as uniform_head instead. */
6778 bitmap_initialize (&map_head, &bitmap_default_obstack);
6779 bitmap_initialize (&map_field_head, &bitmap_default_obstack);
6780 bitmap_initialize (&map_firstprivate_head, &bitmap_default_obstack);
6781 /* If ort == C_ORT_OMP used as nontemporal_head or use_device_xxx_head
6782 instead and for ort == C_ORT_OMP_TARGET used as in_reduction_head. */
6783 bitmap_initialize (&oacc_reduction_head, &bitmap_default_obstack);
6784 bitmap_initialize (&is_on_device_head, &bitmap_default_obstack);
6785
6786 if (ort & C_ORT_ACC)
6787 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
6788 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ASYNC)
6789 {
6790 oacc_async = true;
6791 break;
6792 }
6793
6794 for (pc = &clauses, c = clauses; c ; c = *pc)
6795 {
6796 bool remove = false;
6797 bool field_ok = false;
6798
6799 switch (OMP_CLAUSE_CODE (c))
6800 {
6801 case OMP_CLAUSE_SHARED:
6802 field_ok = ((ort & C_ORT_OMP_DECLARE_SIMD) == C_ORT_OMP);
6803 goto check_dup_generic;
6804 case OMP_CLAUSE_PRIVATE:
6805 field_ok = ((ort & C_ORT_OMP_DECLARE_SIMD) == C_ORT_OMP);
6806 goto check_dup_generic;
6807 case OMP_CLAUSE_REDUCTION:
6808 if (reduction_seen == 0)
6809 reduction_seen = OMP_CLAUSE_REDUCTION_INSCAN (c) ? -1 : 1;
6810 else if (reduction_seen != -2
6811 && reduction_seen != (OMP_CLAUSE_REDUCTION_INSCAN (c)
6812 ? -1 : 1))
6813 {
6814 error_at (OMP_CLAUSE_LOCATION (c),
6815 "%<inscan%> and non-%<inscan%> %<reduction%> clauses "
6816 "on the same construct");
6817 reduction_seen = -2;
6818 }
6819 /* FALLTHRU */
6820 case OMP_CLAUSE_IN_REDUCTION:
6821 case OMP_CLAUSE_TASK_REDUCTION:
6822 field_ok = ((ort & C_ORT_OMP_DECLARE_SIMD) == C_ORT_OMP);
6823 t = OMP_CLAUSE_DECL (c);
6824 if (TREE_CODE (t) == TREE_LIST)
6825 {
6826 if (handle_omp_array_sections (c, ort))
6827 {
6828 remove = true;
6829 break;
6830 }
6831 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6832 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6833 {
6834 error_at (OMP_CLAUSE_LOCATION (c),
6835 "%<inscan%> %<reduction%> clause with array "
6836 "section");
6837 remove = true;
6838 break;
6839 }
6840 if (TREE_CODE (t) == TREE_LIST)
6841 {
6842 while (TREE_CODE (t) == TREE_LIST)
6843 t = TREE_CHAIN (t);
6844 }
6845 else
6846 {
6847 gcc_assert (TREE_CODE (t) == MEM_REF);
6848 t = TREE_OPERAND (t, 0);
6849 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
6850 t = TREE_OPERAND (t, 0);
6851 if (TREE_CODE (t) == ADDR_EXPR
6852 || INDIRECT_REF_P (t))
6853 t = TREE_OPERAND (t, 0);
6854 }
6855 tree n = omp_clause_decl_field (t);
6856 if (n)
6857 t = n;
6858 goto check_dup_generic_t;
6859 }
6860 if (oacc_async)
6861 cxx_mark_addressable (t);
6862 goto check_dup_generic;
6863 case OMP_CLAUSE_COPYPRIVATE:
6864 copyprivate_seen = true;
6865 field_ok = ((ort & C_ORT_OMP_DECLARE_SIMD) == C_ORT_OMP);
6866 goto check_dup_generic;
6867 case OMP_CLAUSE_COPYIN:
6868 goto check_dup_generic;
6869 case OMP_CLAUSE_LINEAR:
6870 field_ok = ((ort & C_ORT_OMP_DECLARE_SIMD) == C_ORT_OMP);
6871 t = OMP_CLAUSE_DECL (c);
6872 if (ort != C_ORT_OMP_DECLARE_SIMD
6873 && OMP_CLAUSE_LINEAR_KIND (c) != OMP_CLAUSE_LINEAR_DEFAULT)
6874 {
6875 error_at (OMP_CLAUSE_LOCATION (c),
6876 "modifier should not be specified in %<linear%> "
6877 "clause on %<simd%> or %<for%> constructs");
6878 OMP_CLAUSE_LINEAR_KIND (c) = OMP_CLAUSE_LINEAR_DEFAULT;
6879 }
6880 if ((VAR_P (t) || TREE_CODE (t) == PARM_DECL)
6881 && !type_dependent_expression_p (t))
6882 {
6883 tree type = TREE_TYPE (t);
6884 if ((OMP_CLAUSE_LINEAR_KIND (c) == OMP_CLAUSE_LINEAR_REF
6885 || OMP_CLAUSE_LINEAR_KIND (c) == OMP_CLAUSE_LINEAR_UVAL)
6886 && !TYPE_REF_P (type))
6887 {
6888 error_at (OMP_CLAUSE_LOCATION (c),
6889 "linear clause with %qs modifier applied to "
6890 "non-reference variable with %qT type",
6891 OMP_CLAUSE_LINEAR_KIND (c) == OMP_CLAUSE_LINEAR_REF
6892 ? "ref" : "uval", TREE_TYPE (t));
6893 remove = true;
6894 break;
6895 }
6896 if (TYPE_REF_P (type))
6897 type = TREE_TYPE (type);
6898 if (OMP_CLAUSE_LINEAR_KIND (c) != OMP_CLAUSE_LINEAR_REF)
6899 {
6900 if (!INTEGRAL_TYPE_P (type)
6901 && !TYPE_PTR_P (type))
6902 {
6903 error_at (OMP_CLAUSE_LOCATION (c),
6904 "linear clause applied to non-integral "
6905 "non-pointer variable with %qT type",
6906 TREE_TYPE (t));
6907 remove = true;
6908 break;
6909 }
6910 }
6911 }
6912 t = OMP_CLAUSE_LINEAR_STEP (c);
6913 if (t == NULL_TREE)
6914 t = integer_one_node;
6915 if (t == error_mark_node)
6916 {
6917 remove = true;
6918 break;
6919 }
6920 else if (!type_dependent_expression_p (t)
6921 && !INTEGRAL_TYPE_P (TREE_TYPE (t))
6922 && (ort != C_ORT_OMP_DECLARE_SIMD
6923 || TREE_CODE (t) != PARM_DECL
6924 || !TYPE_REF_P (TREE_TYPE (t))
6925 || !INTEGRAL_TYPE_P (TREE_TYPE (TREE_TYPE (t)))))
6926 {
6927 error_at (OMP_CLAUSE_LOCATION (c),
6928 "linear step expression must be integral");
6929 remove = true;
6930 break;
6931 }
6932 else
6933 {
6934 t = mark_rvalue_use (t);
6935 if (ort == C_ORT_OMP_DECLARE_SIMD && TREE_CODE (t) == PARM_DECL)
6936 {
6937 OMP_CLAUSE_LINEAR_VARIABLE_STRIDE (c) = 1;
6938 goto check_dup_generic;
6939 }
6940 if (!processing_template_decl
6941 && (VAR_P (OMP_CLAUSE_DECL (c))
6942 || TREE_CODE (OMP_CLAUSE_DECL (c)) == PARM_DECL))
6943 {
6944 if (ort == C_ORT_OMP_DECLARE_SIMD)
6945 {
6946 t = maybe_constant_value (t);
6947 if (TREE_CODE (t) != INTEGER_CST)
6948 {
6949 error_at (OMP_CLAUSE_LOCATION (c),
6950 "%<linear%> clause step %qE is neither "
6951 "constant nor a parameter", t);
6952 remove = true;
6953 break;
6954 }
6955 }
6956 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
6957 tree type = TREE_TYPE (OMP_CLAUSE_DECL (c));
6958 if (TYPE_REF_P (type))
6959 type = TREE_TYPE (type);
6960 if (OMP_CLAUSE_LINEAR_KIND (c) == OMP_CLAUSE_LINEAR_REF)
6961 {
6962 type = build_pointer_type (type);
6963 tree d = fold_convert (type, OMP_CLAUSE_DECL (c));
6964 t = pointer_int_sum (OMP_CLAUSE_LOCATION (c), PLUS_EXPR,
6965 d, t);
6966 t = fold_build2_loc (OMP_CLAUSE_LOCATION (c),
6967 MINUS_EXPR, sizetype,
6968 fold_convert (sizetype, t),
6969 fold_convert (sizetype, d));
6970 if (t == error_mark_node)
6971 {
6972 remove = true;
6973 break;
6974 }
6975 }
6976 else if (TYPE_PTR_P (type)
		   /* Can't multiply the step yet if *this
		      is still an incomplete type.  */
6979 && (ort != C_ORT_OMP_DECLARE_SIMD
6980 || TREE_CODE (OMP_CLAUSE_DECL (c)) != PARM_DECL
6981 || !DECL_ARTIFICIAL (OMP_CLAUSE_DECL (c))
6982 || DECL_NAME (OMP_CLAUSE_DECL (c))
6983 != this_identifier
6984 || !TYPE_BEING_DEFINED (TREE_TYPE (type))))
6985 {
6986 tree d = convert_from_reference (OMP_CLAUSE_DECL (c));
6987 t = pointer_int_sum (OMP_CLAUSE_LOCATION (c), PLUS_EXPR,
6988 d, t);
6989 t = fold_build2_loc (OMP_CLAUSE_LOCATION (c),
6990 MINUS_EXPR, sizetype,
6991 fold_convert (sizetype, t),
6992 fold_convert (sizetype, d));
6993 if (t == error_mark_node)
6994 {
6995 remove = true;
6996 break;
6997 }
6998 }
6999 else
7000 t = fold_convert (type, t);
7001 }
7002 OMP_CLAUSE_LINEAR_STEP (c) = t;
7003 }
7004 goto check_dup_generic;
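	  /* Shared checks for the data-sharing clauses above: look through
	     non-static data member privatization, reject %<this%> where it
	     is not permitted, require a variable (or, where allowed, a
	     non-static data member), and use the bitmaps initialized above
	     to diagnose items that appear in more than one clause.  */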
7005 check_dup_generic:
7006 t = omp_clause_decl_field (OMP_CLAUSE_DECL (c));
7007 if (t)
7008 {
7009 if (!remove && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_SHARED)
7010 omp_note_field_privatization (t, OMP_CLAUSE_DECL (c));
7011 }
7012 else
7013 t = OMP_CLAUSE_DECL (c);
7014 check_dup_generic_t:
7015 if (t == current_class_ptr
7016 && ((ort != C_ORT_OMP_DECLARE_SIMD && ort != C_ORT_ACC)
7017 || (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
7018 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_UNIFORM)))
7019 {
7020 error_at (OMP_CLAUSE_LOCATION (c),
7021 "%<this%> allowed in OpenMP only in %<declare simd%>"
7022 " clauses");
7023 remove = true;
7024 break;
7025 }
7026 if (!VAR_P (t) && TREE_CODE (t) != PARM_DECL
7027 && (!field_ok || TREE_CODE (t) != FIELD_DECL))
7028 {
7029 if (processing_template_decl && TREE_CODE (t) != OVERLOAD)
7030 break;
7031 if (DECL_P (t))
7032 error_at (OMP_CLAUSE_LOCATION (c),
7033 "%qD is not a variable in clause %qs", t,
7034 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
7035 else
7036 error_at (OMP_CLAUSE_LOCATION (c),
7037 "%qE is not a variable in clause %qs", t,
7038 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
7039 remove = true;
7040 }
7041 else if ((ort == C_ORT_ACC
7042 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
7043 || (ort == C_ORT_OMP
7044 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR
7045 || (OMP_CLAUSE_CODE (c)
7046 == OMP_CLAUSE_USE_DEVICE_ADDR)))
7047 || (ort == C_ORT_OMP_TARGET
7048 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION))
7049 {
7050 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
7051 && (bitmap_bit_p (&generic_head, DECL_UID (t))
7052 || bitmap_bit_p (&firstprivate_head, DECL_UID (t))))
7053 {
7054 error_at (OMP_CLAUSE_LOCATION (c),
7055 "%qD appears more than once in data-sharing "
7056 "clauses", t);
7057 remove = true;
7058 break;
7059 }
7060 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
7061 target_in_reduction_seen = true;
7062 if (bitmap_bit_p (&oacc_reduction_head, DECL_UID (t)))
7063 {
7064 error_at (OMP_CLAUSE_LOCATION (c),
7065 ort == C_ORT_ACC
7066 ? "%qD appears more than once in reduction clauses"
7067 : "%qD appears more than once in data clauses",
7068 t);
7069 remove = true;
7070 }
7071 else
7072 bitmap_set_bit (&oacc_reduction_head, DECL_UID (t));
7073 }
7074 else if (bitmap_bit_p (&generic_head, DECL_UID (t))
7075 || bitmap_bit_p (&firstprivate_head, DECL_UID (t))
7076 || bitmap_bit_p (&lastprivate_head, DECL_UID (t))
7077 || bitmap_bit_p (&map_firstprivate_head, DECL_UID (t)))
7078 {
7079 error_at (OMP_CLAUSE_LOCATION (c),
7080 "%qD appears more than once in data clauses", t);
7081 remove = true;
7082 }
7083 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
7084 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR
7085 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
7086 && bitmap_bit_p (&map_head, DECL_UID (t)))
7087 {
7088 if (ort == C_ORT_ACC)
7089 error_at (OMP_CLAUSE_LOCATION (c),
7090 "%qD appears more than once in data clauses", t);
7091 else
7092 error_at (OMP_CLAUSE_LOCATION (c),
7093 "%qD appears both in data and map clauses", t);
7094 remove = true;
7095 }
7096 else
7097 bitmap_set_bit (&generic_head, DECL_UID (t));
7098 if (!field_ok)
7099 break;
7100 handle_field_decl:
7101 if (!remove
7102 && TREE_CODE (t) == FIELD_DECL
7103 && t == OMP_CLAUSE_DECL (c))
7104 {
7105 OMP_CLAUSE_DECL (c)
7106 = omp_privatize_field (t, (OMP_CLAUSE_CODE (c)
7107 == OMP_CLAUSE_SHARED));
7108 if (OMP_CLAUSE_DECL (c) == error_mark_node)
7109 remove = true;
7110 }
7111 break;
7112
7113 case OMP_CLAUSE_FIRSTPRIVATE:
7114 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c) && !implicit_moved)
7115 {
7116 move_implicit:
7117 implicit_moved = true;
	    /* Move firstprivate and map clauses with
	       OMP_CLAUSE_{FIRSTPRIVATE,MAP}_IMPLICIT set to the end of the
	       clause chain.  */
7121 tree cl1 = NULL_TREE, cl2 = NULL_TREE;
7122 tree *pc1 = pc, *pc2 = &cl1, *pc3 = &cl2;
7123 while (*pc1)
7124 if (OMP_CLAUSE_CODE (*pc1) == OMP_CLAUSE_FIRSTPRIVATE
7125 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (*pc1))
7126 {
7127 *pc3 = *pc1;
7128 pc3 = &OMP_CLAUSE_CHAIN (*pc3);
7129 *pc1 = OMP_CLAUSE_CHAIN (*pc1);
7130 }
7131 else if (OMP_CLAUSE_CODE (*pc1) == OMP_CLAUSE_MAP
7132 && OMP_CLAUSE_MAP_IMPLICIT (*pc1))
7133 {
7134 *pc2 = *pc1;
7135 pc2 = &OMP_CLAUSE_CHAIN (*pc2);
7136 *pc1 = OMP_CLAUSE_CHAIN (*pc1);
7137 }
7138 else
7139 pc1 = &OMP_CLAUSE_CHAIN (*pc1);
7140 *pc3 = NULL;
7141 *pc2 = cl2;
7142 *pc1 = cl1;
7143 continue;
7144 }
7145 t = omp_clause_decl_field (OMP_CLAUSE_DECL (c));
7146 if (t)
7147 omp_note_field_privatization (t, OMP_CLAUSE_DECL (c));
7148 else
7149 t = OMP_CLAUSE_DECL (c);
7150 if (ort != C_ORT_ACC && t == current_class_ptr)
7151 {
7152 error_at (OMP_CLAUSE_LOCATION (c),
7153 "%<this%> allowed in OpenMP only in %<declare simd%>"
7154 " clauses");
7155 remove = true;
7156 break;
7157 }
7158 if (!VAR_P (t) && TREE_CODE (t) != PARM_DECL
7159 && ((ort & C_ORT_OMP_DECLARE_SIMD) != C_ORT_OMP
7160 || TREE_CODE (t) != FIELD_DECL))
7161 {
7162 if (processing_template_decl && TREE_CODE (t) != OVERLOAD)
7163 break;
7164 if (DECL_P (t))
7165 error_at (OMP_CLAUSE_LOCATION (c),
7166 "%qD is not a variable in clause %<firstprivate%>",
7167 t);
7168 else
7169 error_at (OMP_CLAUSE_LOCATION (c),
7170 "%qE is not a variable in clause %<firstprivate%>",
7171 t);
7172 remove = true;
7173 }
7174 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
7175 && !OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT_TARGET (c)
7176 && bitmap_bit_p (&map_firstprivate_head, DECL_UID (t)))
7177 remove = true;
7178 else if (bitmap_bit_p (&generic_head, DECL_UID (t))
7179 || bitmap_bit_p (&firstprivate_head, DECL_UID (t))
7180 || bitmap_bit_p (&map_firstprivate_head, DECL_UID (t)))
7181 {
7182 error_at (OMP_CLAUSE_LOCATION (c),
7183 "%qD appears more than once in data clauses", t);
7184 remove = true;
7185 }
7186 else if (bitmap_bit_p (&map_head, DECL_UID (t)))
7187 {
7188 if (ort == C_ORT_ACC)
7189 error_at (OMP_CLAUSE_LOCATION (c),
7190 "%qD appears more than once in data clauses", t);
7191 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
7192 && !OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT_TARGET (c))
7193 /* Silently drop the clause. */;
7194 else
7195 error_at (OMP_CLAUSE_LOCATION (c),
7196 "%qD appears both in data and map clauses", t);
7197 remove = true;
7198 }
7199 else
7200 bitmap_set_bit (&firstprivate_head, DECL_UID (t));
7201 goto handle_field_decl;
7202
7203 case OMP_CLAUSE_LASTPRIVATE:
7204 t = omp_clause_decl_field (OMP_CLAUSE_DECL (c));
7205 if (t)
7206 omp_note_field_privatization (t, OMP_CLAUSE_DECL (c));
7207 else
7208 t = OMP_CLAUSE_DECL (c);
7209 if (ort != C_ORT_ACC && t == current_class_ptr)
7210 {
7211 error_at (OMP_CLAUSE_LOCATION (c),
7212 "%<this%> allowed in OpenMP only in %<declare simd%>"
7213 " clauses");
7214 remove = true;
7215 break;
7216 }
7217 if (!VAR_P (t) && TREE_CODE (t) != PARM_DECL
7218 && ((ort & C_ORT_OMP_DECLARE_SIMD) != C_ORT_OMP
7219 || TREE_CODE (t) != FIELD_DECL))
7220 {
7221 if (processing_template_decl && TREE_CODE (t) != OVERLOAD)
7222 break;
7223 if (DECL_P (t))
7224 error_at (OMP_CLAUSE_LOCATION (c),
7225 "%qD is not a variable in clause %<lastprivate%>",
7226 t);
7227 else
7228 error_at (OMP_CLAUSE_LOCATION (c),
7229 "%qE is not a variable in clause %<lastprivate%>",
7230 t);
7231 remove = true;
7232 }
7233 else if (bitmap_bit_p (&generic_head, DECL_UID (t))
7234 || bitmap_bit_p (&lastprivate_head, DECL_UID (t)))
7235 {
7236 error_at (OMP_CLAUSE_LOCATION (c),
7237 "%qD appears more than once in data clauses", t);
7238 remove = true;
7239 }
7240 else
7241 bitmap_set_bit (&lastprivate_head, DECL_UID (t));
7242 goto handle_field_decl;
7243
7244 case OMP_CLAUSE_IF:
7245 t = OMP_CLAUSE_IF_EXPR (c);
7246 t = maybe_convert_cond (t);
7247 if (t == error_mark_node)
7248 remove = true;
7249 else if (!processing_template_decl)
7250 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
7251 OMP_CLAUSE_IF_EXPR (c) = t;
7252 break;
7253
7254 case OMP_CLAUSE_FINAL:
7255 t = OMP_CLAUSE_FINAL_EXPR (c);
7256 t = maybe_convert_cond (t);
7257 if (t == error_mark_node)
7258 remove = true;
7259 else if (!processing_template_decl)
7260 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
7261 OMP_CLAUSE_FINAL_EXPR (c) = t;
7262 break;
7263
7264 case OMP_CLAUSE_GANG:
7265 /* Operand 1 is the gang static: argument. */
7266 t = OMP_CLAUSE_OPERAND (c, 1);
7267 if (t != NULL_TREE)
7268 {
7269 if (t == error_mark_node)
7270 remove = true;
7271 else if (!type_dependent_expression_p (t)
7272 && !INTEGRAL_TYPE_P (TREE_TYPE (t)))
7273 {
7274 error_at (OMP_CLAUSE_LOCATION (c),
7275 "%<gang%> static expression must be integral");
7276 remove = true;
7277 }
7278 else
7279 {
7280 t = mark_rvalue_use (t);
7281 if (!processing_template_decl)
7282 {
7283 t = maybe_constant_value (t);
7284 if (TREE_CODE (t) == INTEGER_CST
7285 && tree_int_cst_sgn (t) != 1
7286 && t != integer_minus_one_node)
7287 {
7288 warning_at (OMP_CLAUSE_LOCATION (c), 0,
7289 "%<gang%> static value must be "
7290 "positive");
7291 t = integer_one_node;
7292 }
7293 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
7294 }
7295 }
7296 OMP_CLAUSE_OPERAND (c, 1) = t;
7297 }
7298 /* Check operand 0, the num argument. */
7299 /* FALLTHRU */
7300
7301 case OMP_CLAUSE_WORKER:
7302 case OMP_CLAUSE_VECTOR:
7303 if (OMP_CLAUSE_OPERAND (c, 0) == NULL_TREE)
7304 break;
7305 /* FALLTHRU */
7306
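	  /* Clauses that take a single expression argument: the expression
	     must be integral, and non-positive constant values are
	     diagnosed.  */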
7307 case OMP_CLAUSE_NUM_TASKS:
7308 case OMP_CLAUSE_NUM_TEAMS:
7309 case OMP_CLAUSE_NUM_THREADS:
7310 case OMP_CLAUSE_NUM_GANGS:
7311 case OMP_CLAUSE_NUM_WORKERS:
7312 case OMP_CLAUSE_VECTOR_LENGTH:
7313 t = OMP_CLAUSE_OPERAND (c, 0);
7314 if (t == error_mark_node)
7315 remove = true;
7316 else if (!type_dependent_expression_p (t)
7317 && !INTEGRAL_TYPE_P (TREE_TYPE (t)))
7318 {
7319 switch (OMP_CLAUSE_CODE (c))
7320 {
7321 case OMP_CLAUSE_GANG:
7322 error_at (OMP_CLAUSE_LOCATION (c),
			  "%<gang%> num expression must be integral");
		break;
7324 case OMP_CLAUSE_VECTOR:
7325 error_at (OMP_CLAUSE_LOCATION (c),
7326 "%<vector%> length expression must be integral");
7327 break;
7328 case OMP_CLAUSE_WORKER:
7329 error_at (OMP_CLAUSE_LOCATION (c),
7330 "%<worker%> num expression must be integral");
7331 break;
7332 default:
7333 error_at (OMP_CLAUSE_LOCATION (c),
7334 "%qs expression must be integral",
7335 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
7336 }
7337 remove = true;
7338 }
7339 else
7340 {
7341 t = mark_rvalue_use (t);
7342 if (!processing_template_decl)
7343 {
7344 t = maybe_constant_value (t);
7345 if (TREE_CODE (t) == INTEGER_CST
7346 && tree_int_cst_sgn (t) != 1)
7347 {
7348 switch (OMP_CLAUSE_CODE (c))
7349 {
7350 case OMP_CLAUSE_GANG:
7351 warning_at (OMP_CLAUSE_LOCATION (c), 0,
7352 "%<gang%> num value must be positive");
7353 break;
7354 case OMP_CLAUSE_VECTOR:
7355 warning_at (OMP_CLAUSE_LOCATION (c), 0,
7356 "%<vector%> length value must be "
7357 "positive");
7358 break;
7359 case OMP_CLAUSE_WORKER:
7360 warning_at (OMP_CLAUSE_LOCATION (c), 0,
7361 "%<worker%> num value must be "
7362 "positive");
7363 break;
7364 default:
7365 warning_at (OMP_CLAUSE_LOCATION (c), 0,
7366 "%qs value must be positive",
7367 omp_clause_code_name
7368 [OMP_CLAUSE_CODE (c)]);
7369 }
7370 t = integer_one_node;
7371 }
7372 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
7373 }
7374 OMP_CLAUSE_OPERAND (c, 0) = t;
7375 }
7376 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS
7377 && OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
7378 && !remove)
7379 {
7380 t = OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c);
7381 if (t == error_mark_node)
7382 remove = true;
7383 else if (!type_dependent_expression_p (t)
7384 && !INTEGRAL_TYPE_P (TREE_TYPE (t)))
7385 {
7386 error_at (OMP_CLAUSE_LOCATION (c),
7387 "%qs expression must be integral",
7388 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
7389 remove = true;
7390 }
7391 else
7392 {
7393 t = mark_rvalue_use (t);
7394 if (!processing_template_decl)
7395 {
7396 t = maybe_constant_value (t);
7397 if (TREE_CODE (t) == INTEGER_CST
7398 && tree_int_cst_sgn (t) != 1)
7399 {
7400 warning_at (OMP_CLAUSE_LOCATION (c), 0,
7401 "%qs value must be positive",
7402 omp_clause_code_name
7403 [OMP_CLAUSE_CODE (c)]);
7404 t = NULL_TREE;
7405 }
7406 else
7407 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
7408 tree upper = OMP_CLAUSE_NUM_TEAMS_UPPER_EXPR (c);
7409 if (t
7410 && TREE_CODE (t) == INTEGER_CST
7411 && TREE_CODE (upper) == INTEGER_CST
7412 && tree_int_cst_lt (upper, t))
7413 {
7414 warning_at (OMP_CLAUSE_LOCATION (c), 0,
7415 "%<num_teams%> lower bound %qE bigger "
7416 "than upper bound %qE", t, upper);
7417 t = NULL_TREE;
7418 }
7419 }
7420 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c) = t;
7421 }
7422 }
7423 break;
7424
7425 case OMP_CLAUSE_SCHEDULE:
7426 t = OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c);
7427 if (t == NULL)
7428 ;
7429 else if (t == error_mark_node)
7430 remove = true;
7431 else if (!type_dependent_expression_p (t)
7432 && !INTEGRAL_TYPE_P (TREE_TYPE (t)))
7433 {
7434 error_at (OMP_CLAUSE_LOCATION (c),
7435 "schedule chunk size expression must be integral");
7436 remove = true;
7437 }
7438 else
7439 {
7440 t = mark_rvalue_use (t);
7441 if (!processing_template_decl)
7442 {
7443 t = maybe_constant_value (t);
7444 if (TREE_CODE (t) == INTEGER_CST
7445 && tree_int_cst_sgn (t) != 1)
7446 {
7447 warning_at (OMP_CLAUSE_LOCATION (c), 0,
7448 "chunk size value must be positive");
7449 t = integer_one_node;
7450 }
7451 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
7452 }
7453 OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c) = t;
7454 }
7455 if (!remove)
7456 schedule_seen = true;
7457 break;
7458
7459 case OMP_CLAUSE_SIMDLEN:
7460 case OMP_CLAUSE_SAFELEN:
7461 t = OMP_CLAUSE_OPERAND (c, 0);
7462 if (t == error_mark_node)
7463 remove = true;
7464 else if (!type_dependent_expression_p (t)
7465 && !INTEGRAL_TYPE_P (TREE_TYPE (t)))
7466 {
7467 error_at (OMP_CLAUSE_LOCATION (c),
7468 "%qs length expression must be integral",
7469 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
7470 remove = true;
7471 }
7472 else
7473 {
7474 t = mark_rvalue_use (t);
7475 if (!processing_template_decl)
7476 {
7477 t = maybe_constant_value (t);
7478 if (TREE_CODE (t) != INTEGER_CST
7479 || tree_int_cst_sgn (t) != 1)
7480 {
7481 error_at (OMP_CLAUSE_LOCATION (c),
7482 "%qs length expression must be positive "
7483 "constant integer expression",
7484 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
7485 remove = true;
7486 }
7487 }
7488 OMP_CLAUSE_OPERAND (c, 0) = t;
7489 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SAFELEN)
7490 safelen = c;
7491 }
7492 break;
7493
7494 case OMP_CLAUSE_ASYNC:
7495 t = OMP_CLAUSE_ASYNC_EXPR (c);
7496 if (t == error_mark_node)
7497 remove = true;
7498 else if (!type_dependent_expression_p (t)
7499 && !INTEGRAL_TYPE_P (TREE_TYPE (t)))
7500 {
7501 error_at (OMP_CLAUSE_LOCATION (c),
7502 "%<async%> expression must be integral");
7503 remove = true;
7504 }
7505 else
7506 {
7507 t = mark_rvalue_use (t);
7508 if (!processing_template_decl)
7509 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
7510 OMP_CLAUSE_ASYNC_EXPR (c) = t;
7511 }
7512 break;
7513
7514 case OMP_CLAUSE_WAIT:
7515 t = OMP_CLAUSE_WAIT_EXPR (c);
7516 if (t == error_mark_node)
7517 remove = true;
7518 else if (!processing_template_decl)
7519 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
7520 OMP_CLAUSE_WAIT_EXPR (c) = t;
7521 break;
7522
7523 case OMP_CLAUSE_THREAD_LIMIT:
7524 t = OMP_CLAUSE_THREAD_LIMIT_EXPR (c);
7525 if (t == error_mark_node)
7526 remove = true;
7527 else if (!type_dependent_expression_p (t)
7528 && !INTEGRAL_TYPE_P (TREE_TYPE (t)))
7529 {
7530 error_at (OMP_CLAUSE_LOCATION (c),
7531 "%<thread_limit%> expression must be integral");
7532 remove = true;
7533 }
7534 else
7535 {
7536 t = mark_rvalue_use (t);
7537 if (!processing_template_decl)
7538 {
7539 t = maybe_constant_value (t);
7540 if (TREE_CODE (t) == INTEGER_CST
7541 && tree_int_cst_sgn (t) != 1)
7542 {
7543 warning_at (OMP_CLAUSE_LOCATION (c), 0,
7544 "%<thread_limit%> value must be positive");
7545 t = integer_one_node;
7546 }
7547 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
7548 }
7549 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = t;
7550 }
7551 break;
7552
7553 case OMP_CLAUSE_DEVICE:
7554 t = OMP_CLAUSE_DEVICE_ID (c);
7555 if (t == error_mark_node)
7556 remove = true;
7557 else if (!type_dependent_expression_p (t)
7558 && !INTEGRAL_TYPE_P (TREE_TYPE (t)))
7559 {
7560 error_at (OMP_CLAUSE_LOCATION (c),
7561 "%<device%> id must be integral");
7562 remove = true;
7563 }
7564 else if (OMP_CLAUSE_DEVICE_ANCESTOR (c)
7565 && TREE_CODE (t) == INTEGER_CST
7566 && !integer_onep (t))
7567 {
7568 error_at (OMP_CLAUSE_LOCATION (c),
7569 "the %<device%> clause expression must evaluate to "
7570 "%<1%>");
7571 remove = true;
7572 }
7573 else
7574 {
7575 t = mark_rvalue_use (t);
7576 if (!processing_template_decl)
7577 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
7578 OMP_CLAUSE_DEVICE_ID (c) = t;
7579 }
7580 break;
7581
7582 case OMP_CLAUSE_DIST_SCHEDULE:
7583 t = OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (c);
7584 if (t == NULL)
7585 ;
7586 else if (t == error_mark_node)
7587 remove = true;
7588 else if (!type_dependent_expression_p (t)
7589 && !INTEGRAL_TYPE_P (TREE_TYPE (t)))
7590 {
7591 error_at (OMP_CLAUSE_LOCATION (c),
7592 "%<dist_schedule%> chunk size expression must be "
7593 "integral");
7594 remove = true;
7595 }
7596 else
7597 {
7598 t = mark_rvalue_use (t);
7599 if (!processing_template_decl)
7600 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
7601 OMP_CLAUSE_DIST_SCHEDULE_CHUNK_EXPR (c) = t;
7602 }
7603 break;
7604
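	  /* aligned clause: the list item must be a pointer, an array, or a
	     reference to one of those, and the optional alignment must be a
	     positive constant integer expression.  */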
7605 case OMP_CLAUSE_ALIGNED:
7606 t = OMP_CLAUSE_DECL (c);
7607 if (t == current_class_ptr && ort != C_ORT_OMP_DECLARE_SIMD)
7608 {
7609 error_at (OMP_CLAUSE_LOCATION (c),
7610 "%<this%> allowed in OpenMP only in %<declare simd%>"
7611 " clauses");
7612 remove = true;
7613 break;
7614 }
7615 if (!VAR_P (t) && TREE_CODE (t) != PARM_DECL)
7616 {
7617 if (processing_template_decl && TREE_CODE (t) != OVERLOAD)
7618 break;
7619 if (DECL_P (t))
7620 error_at (OMP_CLAUSE_LOCATION (c),
7621 "%qD is not a variable in %<aligned%> clause", t);
7622 else
7623 error_at (OMP_CLAUSE_LOCATION (c),
7624 "%qE is not a variable in %<aligned%> clause", t);
7625 remove = true;
7626 }
7627 else if (!type_dependent_expression_p (t)
7628 && !TYPE_PTR_P (TREE_TYPE (t))
7629 && TREE_CODE (TREE_TYPE (t)) != ARRAY_TYPE
7630 && (!TYPE_REF_P (TREE_TYPE (t))
7631 || (!INDIRECT_TYPE_P (TREE_TYPE (TREE_TYPE (t)))
7632 && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
7633 != ARRAY_TYPE))))
7634 {
7635 error_at (OMP_CLAUSE_LOCATION (c),
7636 "%qE in %<aligned%> clause is neither a pointer nor "
7637 "an array nor a reference to pointer or array", t);
7638 remove = true;
7639 }
7640 else if (bitmap_bit_p (&aligned_head, DECL_UID (t)))
7641 {
7642 error_at (OMP_CLAUSE_LOCATION (c),
7643 "%qD appears more than once in %<aligned%> clauses",
7644 t);
7645 remove = true;
7646 }
7647 else
7648 bitmap_set_bit (&aligned_head, DECL_UID (t));
7649 t = OMP_CLAUSE_ALIGNED_ALIGNMENT (c);
7650 if (t == error_mark_node)
7651 remove = true;
7652 else if (t == NULL_TREE)
7653 break;
7654 else if (!type_dependent_expression_p (t)
7655 && !INTEGRAL_TYPE_P (TREE_TYPE (t)))
7656 {
7657 error_at (OMP_CLAUSE_LOCATION (c),
7658 "%<aligned%> clause alignment expression must "
7659 "be integral");
7660 remove = true;
7661 }
7662 else
7663 {
7664 t = mark_rvalue_use (t);
7665 if (!processing_template_decl)
7666 {
7667 t = maybe_constant_value (t);
7668 if (TREE_CODE (t) != INTEGER_CST
7669 || tree_int_cst_sgn (t) != 1)
7670 {
7671 error_at (OMP_CLAUSE_LOCATION (c),
7672 "%<aligned%> clause alignment expression must "
7673 "be positive constant integer expression");
7674 remove = true;
7675 }
7676 else
7677 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
7678 }
7679 OMP_CLAUSE_ALIGNED_ALIGNMENT (c) = t;
7680 }
7681 break;
7682
7683 case OMP_CLAUSE_NONTEMPORAL:
7684 t = OMP_CLAUSE_DECL (c);
7685 if (!VAR_P (t) && TREE_CODE (t) != PARM_DECL)
7686 {
7687 if (processing_template_decl && TREE_CODE (t) != OVERLOAD)
7688 break;
7689 if (DECL_P (t))
7690 error_at (OMP_CLAUSE_LOCATION (c),
7691 "%qD is not a variable in %<nontemporal%> clause",
7692 t);
7693 else
7694 error_at (OMP_CLAUSE_LOCATION (c),
7695 "%qE is not a variable in %<nontemporal%> clause",
7696 t);
7697 remove = true;
7698 }
7699 else if (bitmap_bit_p (&oacc_reduction_head, DECL_UID (t)))
7700 {
7701 error_at (OMP_CLAUSE_LOCATION (c),
7702 "%qD appears more than once in %<nontemporal%> "
7703 "clauses", t);
7704 remove = true;
7705 }
7706 else
7707 bitmap_set_bit (&oacc_reduction_head, DECL_UID (t));
7708 break;
7709
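	  /* allocate clause: duplicate list items are tracked in
	     aligned_head; the align modifier must be a constant power of two
	     and the allocator expression must have type
	     omp_allocator_handle_t.  */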
7710 case OMP_CLAUSE_ALLOCATE:
7711 t = omp_clause_decl_field (OMP_CLAUSE_DECL (c));
7712 if (t)
7713 omp_note_field_privatization (t, OMP_CLAUSE_DECL (c));
7714 else
7715 t = OMP_CLAUSE_DECL (c);
7716 if (t == current_class_ptr)
7717 {
7718 error_at (OMP_CLAUSE_LOCATION (c),
7719 "%<this%> not allowed in %<allocate%> clause");
7720 remove = true;
7721 break;
7722 }
7723 if (!VAR_P (t)
7724 && TREE_CODE (t) != PARM_DECL
7725 && TREE_CODE (t) != FIELD_DECL)
7726 {
7727 if (processing_template_decl && TREE_CODE (t) != OVERLOAD)
7728 break;
7729 if (DECL_P (t))
7730 error_at (OMP_CLAUSE_LOCATION (c),
7731 "%qD is not a variable in %<allocate%> clause", t);
7732 else
7733 error_at (OMP_CLAUSE_LOCATION (c),
7734 "%qE is not a variable in %<allocate%> clause", t);
7735 remove = true;
7736 }
7737 else if (bitmap_bit_p (&aligned_head, DECL_UID (t)))
7738 {
7739 warning_at (OMP_CLAUSE_LOCATION (c), 0,
7740 "%qD appears more than once in %<allocate%> clauses",
7741 t);
7742 remove = true;
7743 }
7744 else
7745 {
7746 bitmap_set_bit (&aligned_head, DECL_UID (t));
7747 allocate_seen = true;
7748 }
7749 tree allocator, align;
7750 align = OMP_CLAUSE_ALLOCATE_ALIGN (c);
7751 if (error_operand_p (align))
7752 {
7753 remove = true;
7754 break;
7755 }
7756 if (align)
7757 {
7758 if (!type_dependent_expression_p (align)
7759 && !INTEGRAL_TYPE_P (TREE_TYPE (align)))
7760 {
7761 error_at (OMP_CLAUSE_LOCATION (c),
7762 "%<allocate%> clause %<align%> modifier "
7763 "argument needs to be positive constant "
7764 "power of two integer expression");
7765 remove = true;
7766 }
7767 else
7768 {
7769 align = mark_rvalue_use (align);
7770 if (!processing_template_decl)
7771 {
7772 align = maybe_constant_value (align);
7773 if (TREE_CODE (align) != INTEGER_CST
7774 || !tree_fits_uhwi_p (align)
7775 || !integer_pow2p (align))
7776 {
7777 error_at (OMP_CLAUSE_LOCATION (c),
7778 "%<allocate%> clause %<align%> modifier "
7779 "argument needs to be positive constant "
7780 "power of two integer expression");
7781 remove = true;
7782 }
7783 }
7784 }
7785 OMP_CLAUSE_ALLOCATE_ALIGN (c) = align;
7786 }
7787 allocator = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
7788 if (error_operand_p (allocator))
7789 {
7790 remove = true;
7791 break;
7792 }
7793 if (allocator == NULL_TREE)
7794 goto handle_field_decl;
7795 tree allocatort;
7796 allocatort = TYPE_MAIN_VARIANT (TREE_TYPE (allocator));
7797 if (!type_dependent_expression_p (allocator)
7798 && (TREE_CODE (allocatort) != ENUMERAL_TYPE
7799 || TYPE_NAME (allocatort) == NULL_TREE
7800 || TREE_CODE (TYPE_NAME (allocatort)) != TYPE_DECL
7801 || (DECL_NAME (TYPE_NAME (allocatort))
7802 != get_identifier ("omp_allocator_handle_t"))
7803 || (TYPE_CONTEXT (allocatort)
7804 != DECL_CONTEXT (global_namespace))))
7805 {
7806 error_at (OMP_CLAUSE_LOCATION (c),
7807 "%<allocate%> clause allocator expression has "
7808 "type %qT rather than %<omp_allocator_handle_t%>",
7809 TREE_TYPE (allocator));
7810 remove = true;
7811 break;
7812 }
7813 else
7814 {
7815 allocator = mark_rvalue_use (allocator);
7816 if (!processing_template_decl)
7817 allocator = maybe_constant_value (allocator);
7818 OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) = allocator;
7819 }
7820 goto handle_field_decl;
7821
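	  /* depend and affinity clauses: handle iterators and array
	     sections, require an lvalue that is not a bit-field, check
	     %<omp_depend_t%> usage for the %<depobj%> dependence type, and
	     rewrite the operand so that its address is taken.  */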
7822 case OMP_CLAUSE_DEPEND:
7823 t = OMP_CLAUSE_DECL (c);
7824 if (t == NULL_TREE)
7825 {
7826 gcc_assert (OMP_CLAUSE_DEPEND_KIND (c)
7827 == OMP_CLAUSE_DEPEND_SOURCE);
7828 break;
7829 }
7830 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
7831 {
7832 if (cp_finish_omp_clause_depend_sink (c))
7833 remove = true;
7834 break;
7835 }
7836 /* FALLTHRU */
7837 case OMP_CLAUSE_AFFINITY:
7838 t = OMP_CLAUSE_DECL (c);
7839 if (TREE_CODE (t) == TREE_LIST
7840 && TREE_PURPOSE (t)
7841 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
7842 {
7843 if (TREE_PURPOSE (t) != last_iterators)
7844 last_iterators_remove
7845 = cp_omp_finish_iterators (TREE_PURPOSE (t));
7846 last_iterators = TREE_PURPOSE (t);
7847 t = TREE_VALUE (t);
7848 if (last_iterators_remove)
7849 t = error_mark_node;
7850 }
7851 else
7852 last_iterators = NULL_TREE;
7853
7854 if (TREE_CODE (t) == TREE_LIST)
7855 {
7856 if (handle_omp_array_sections (c, ort))
7857 remove = true;
7858 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
7859 && (OMP_CLAUSE_DEPEND_KIND (c)
7860 == OMP_CLAUSE_DEPEND_DEPOBJ))
7861 {
7862 error_at (OMP_CLAUSE_LOCATION (c),
7863 "%<depend%> clause with %<depobj%> dependence "
7864 "type on array section");
7865 remove = true;
7866 }
7867 break;
7868 }
7869 if (t == error_mark_node)
7870 remove = true;
7871 else if (processing_template_decl && TREE_CODE (t) != OVERLOAD)
7872 break;
7873 else if (!lvalue_p (t))
7874 {
7875 if (DECL_P (t))
7876 error_at (OMP_CLAUSE_LOCATION (c),
7877 "%qD is not lvalue expression nor array section "
7878 "in %qs clause", t,
7879 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
7880 else
7881 error_at (OMP_CLAUSE_LOCATION (c),
7882 "%qE is not lvalue expression nor array section "
7883 "in %qs clause", t,
7884 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
7885 remove = true;
7886 }
7887 else if (TREE_CODE (t) == COMPONENT_REF
7888 && TREE_CODE (TREE_OPERAND (t, 1)) == FIELD_DECL
7889 && DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
7890 {
7891 error_at (OMP_CLAUSE_LOCATION (c),
7892 "bit-field %qE in %qs clause", t,
7893 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
7894 remove = true;
7895 }
7896 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
7897 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_DEPOBJ)
7898 {
7899 if (!c_omp_depend_t_p (TYPE_REF_P (TREE_TYPE (t))
7900 ? TREE_TYPE (TREE_TYPE (t))
7901 : TREE_TYPE (t)))
7902 {
7903 error_at (OMP_CLAUSE_LOCATION (c),
7904 "%qE does not have %<omp_depend_t%> type in "
7905 "%<depend%> clause with %<depobj%> dependence "
7906 "type", t);
7907 remove = true;
7908 }
7909 }
7910 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
7911 && c_omp_depend_t_p (TYPE_REF_P (TREE_TYPE (t))
7912 ? TREE_TYPE (TREE_TYPE (t))
7913 : TREE_TYPE (t)))
7914 {
7915 error_at (OMP_CLAUSE_LOCATION (c),
7916 "%qE should not have %<omp_depend_t%> type in "
7917 "%<depend%> clause with dependence type other than "
7918 "%<depobj%>", t);
7919 remove = true;
7920 }
7921 if (!remove)
7922 {
7923 tree addr = cp_build_addr_expr (t, tf_warning_or_error);
7924 if (addr == error_mark_node)
7925 remove = true;
7926 else
7927 {
7928 t = cp_build_indirect_ref (OMP_CLAUSE_LOCATION (c),
7929 addr, RO_UNARY_STAR,
7930 tf_warning_or_error);
7931 if (t == error_mark_node)
7932 remove = true;
7933 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == TREE_LIST
7934 && TREE_PURPOSE (OMP_CLAUSE_DECL (c))
7935 && (TREE_CODE (TREE_PURPOSE (OMP_CLAUSE_DECL (c)))
7936 == TREE_VEC))
7937 TREE_VALUE (OMP_CLAUSE_DECL (c)) = t;
7938 else
7939 OMP_CLAUSE_DECL (c) = t;
7940 }
7941 }
7942 break;
7943 case OMP_CLAUSE_DETACH:
7944 t = OMP_CLAUSE_DECL (c);
7945 if (detach_seen)
7946 {
7947 error_at (OMP_CLAUSE_LOCATION (c),
7948 "too many %qs clauses on a task construct",
7949 "detach");
7950 remove = true;
7951 break;
7952 }
7953 else if (error_operand_p (t))
7954 {
7955 remove = true;
7956 break;
7957 }
7958 else
7959 {
7960 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (t));
7961 if (!type_dependent_expression_p (t)
7962 && (!INTEGRAL_TYPE_P (type)
7963 || TREE_CODE (type) != ENUMERAL_TYPE
7964 || TYPE_NAME (type) == NULL_TREE
7965 || (DECL_NAME (TYPE_NAME (type))
7966 != get_identifier ("omp_event_handle_t"))))
7967 {
7968 error_at (OMP_CLAUSE_LOCATION (c),
7969 "%<detach%> clause event handle "
7970 "has type %qT rather than "
7971 "%<omp_event_handle_t%>",
7972 type);
7973 remove = true;
7974 }
7975 detach_seen = c;
7976 cxx_mark_addressable (t);
7977 }
7978 break;
7979
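	  /* map, to, from and _cache_ clauses: handle array sections, strip
	     component and array references down to the base declaration,
	     check for mappable types and duplicate appearances, and make the
	     base addressable where required.  */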
7980 case OMP_CLAUSE_MAP:
7981 if (OMP_CLAUSE_MAP_IMPLICIT (c) && !implicit_moved)
7982 goto move_implicit;
7983 /* FALLTHRU */
7984 case OMP_CLAUSE_TO:
7985 case OMP_CLAUSE_FROM:
7986 case OMP_CLAUSE__CACHE_:
7987 t = OMP_CLAUSE_DECL (c);
7988 if (TREE_CODE (t) == TREE_LIST)
7989 {
7990 if (handle_omp_array_sections (c, ort))
7991 remove = true;
7992 else
7993 {
7994 t = OMP_CLAUSE_DECL (c);
7995 if (TREE_CODE (t) != TREE_LIST
7996 && !type_dependent_expression_p (t)
7997 && !cp_omp_mappable_type (TREE_TYPE (t)))
7998 {
7999 error_at (OMP_CLAUSE_LOCATION (c),
8000 "array section does not have mappable type "
8001 "in %qs clause",
8002 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
8003 cp_omp_emit_unmappable_type_notes (TREE_TYPE (t));
8004 remove = true;
8005 }
8006 while (TREE_CODE (t) == ARRAY_REF)
8007 t = TREE_OPERAND (t, 0);
8008 if (TREE_CODE (t) == COMPONENT_REF
8009 && TREE_CODE (TREE_TYPE (t)) == ARRAY_TYPE)
8010 {
8011 do
8012 {
8013 t = TREE_OPERAND (t, 0);
8014 if (REFERENCE_REF_P (t))
8015 t = TREE_OPERAND (t, 0);
8016 if (TREE_CODE (t) == MEM_REF
8017 || TREE_CODE (t) == INDIRECT_REF)
8018 {
8019 t = TREE_OPERAND (t, 0);
8020 STRIP_NOPS (t);
8021 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
8022 t = TREE_OPERAND (t, 0);
8023 }
8024 }
8025 while (TREE_CODE (t) == COMPONENT_REF
8026 || TREE_CODE (t) == ARRAY_REF);
8027
8028 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
8029 && OMP_CLAUSE_MAP_IMPLICIT (c)
8030 && (bitmap_bit_p (&map_head, DECL_UID (t))
8031 || bitmap_bit_p (&map_field_head, DECL_UID (t))
8032 || bitmap_bit_p (&map_firstprivate_head,
8033 DECL_UID (t))))
8034 {
8035 remove = true;
8036 break;
8037 }
8038 if (bitmap_bit_p (&map_field_head, DECL_UID (t)))
8039 break;
8040 if (bitmap_bit_p (&map_head, DECL_UID (t)))
8041 {
8042 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
8043 error_at (OMP_CLAUSE_LOCATION (c),
8044 "%qD appears more than once in motion"
8045 " clauses", t);
8046 else if (ort == C_ORT_ACC)
8047 error_at (OMP_CLAUSE_LOCATION (c),
8048 "%qD appears more than once in data"
8049 " clauses", t);
8050 else
8051 error_at (OMP_CLAUSE_LOCATION (c),
8052 "%qD appears more than once in map"
8053 " clauses", t);
8054 remove = true;
8055 }
8056 else
8057 {
8058 bitmap_set_bit (&map_head, DECL_UID (t));
8059 bitmap_set_bit (&map_field_head, DECL_UID (t));
8060 }
8061 }
8062 }
8063 if (cp_oacc_check_attachments (c))
8064 remove = true;
8065 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
8066 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
8067 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH))
8068 /* In this case, we have a single array element which is a
8069 pointer, and we already set OMP_CLAUSE_SIZE in
8070 handle_omp_array_sections above. For attach/detach clauses,
8071 reset the OMP_CLAUSE_SIZE (representing a bias) to zero
8072 here. */
8073 OMP_CLAUSE_SIZE (c) = size_zero_node;
8074 break;
8075 }
8076 if (t == error_mark_node)
8077 {
8078 remove = true;
8079 break;
8080 }
8081 /* OpenACC attach / detach clauses must be pointers. */
8082 if (cp_oacc_check_attachments (c))
8083 {
8084 remove = true;
8085 break;
8086 }
8087 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
8088 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
8089 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH))
8090 /* For attach/detach clauses, set OMP_CLAUSE_SIZE (representing a
8091 bias) to zero here, so it is not set erroneously to the pointer
8092 size later on in gimplify.cc. */
8093 OMP_CLAUSE_SIZE (c) = size_zero_node;
8094 if (REFERENCE_REF_P (t)
8095 && TREE_CODE (TREE_OPERAND (t, 0)) == COMPONENT_REF)
8096 {
8097 t = TREE_OPERAND (t, 0);
8098 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
8099 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH_DETACH)
8100 OMP_CLAUSE_DECL (c) = t;
8101 }
8102 while (TREE_CODE (t) == INDIRECT_REF
8103 || TREE_CODE (t) == ARRAY_REF)
8104 {
8105 t = TREE_OPERAND (t, 0);
8106 STRIP_NOPS (t);
8107 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
8108 t = TREE_OPERAND (t, 0);
8109 }
8110 while (TREE_CODE (t) == COMPOUND_EXPR)
8111 {
8112 t = TREE_OPERAND (t, 1);
8113 STRIP_NOPS (t);
8114 }
8115 indir_component_ref_p = false;
8116 if (TREE_CODE (t) == COMPONENT_REF
8117 && (TREE_CODE (TREE_OPERAND (t, 0)) == INDIRECT_REF
8118 || TREE_CODE (TREE_OPERAND (t, 0)) == ARRAY_REF))
8119 {
8120 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
8121 indir_component_ref_p = true;
8122 STRIP_NOPS (t);
8123 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
8124 t = TREE_OPERAND (t, 0);
8125 }
8126 if (TREE_CODE (t) == COMPONENT_REF
8127 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE__CACHE_)
8128 {
8129 if (type_dependent_expression_p (t))
8130 break;
8131 if (TREE_CODE (TREE_OPERAND (t, 1)) == FIELD_DECL
8132 && DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
8133 {
8134 error_at (OMP_CLAUSE_LOCATION (c),
8135 "bit-field %qE in %qs clause",
8136 t, omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
8137 remove = true;
8138 }
8139 else if (!cp_omp_mappable_type (TREE_TYPE (t)))
8140 {
8141 error_at (OMP_CLAUSE_LOCATION (c),
8142 "%qE does not have a mappable type in %qs clause",
8143 t, omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
8144 cp_omp_emit_unmappable_type_notes (TREE_TYPE (t));
8145 remove = true;
8146 }
8147 while (TREE_CODE (t) == COMPONENT_REF)
8148 {
8149 if (TREE_TYPE (TREE_OPERAND (t, 0))
8150 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0)))
8151 == UNION_TYPE))
8152 {
8153 error_at (OMP_CLAUSE_LOCATION (c),
8154 "%qE is a member of a union", t);
8155 remove = true;
8156 break;
8157 }
8158 t = TREE_OPERAND (t, 0);
8159 if (TREE_CODE (t) == MEM_REF)
8160 {
8161 if (maybe_ne (mem_ref_offset (t), 0))
8162 error_at (OMP_CLAUSE_LOCATION (c),
8163 "cannot dereference %qE in %qs clause", t,
8164 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
8165 else
8166 t = TREE_OPERAND (t, 0);
8167 }
8168 while (TREE_CODE (t) == MEM_REF
8169 || TREE_CODE (t) == INDIRECT_REF
8170 || TREE_CODE (t) == ARRAY_REF)
8171 {
8172 t = TREE_OPERAND (t, 0);
8173 STRIP_NOPS (t);
8174 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
8175 t = TREE_OPERAND (t, 0);
8176 }
8177 }
8178 if (remove)
8179 break;
8180 if (REFERENCE_REF_P (t))
8181 t = TREE_OPERAND (t, 0);
8182 if (VAR_P (t) || TREE_CODE (t) == PARM_DECL)
8183 {
8184 if (bitmap_bit_p (&map_field_head, DECL_UID (t))
8185 || (ort != C_ORT_ACC
8186 && bitmap_bit_p (&map_head, DECL_UID (t))))
8187 goto handle_map_references;
8188 }
8189 }
8190 if (!processing_template_decl
8191 && TREE_CODE (t) == FIELD_DECL)
8192 {
8193 OMP_CLAUSE_DECL (c) = finish_non_static_data_member (t, NULL_TREE,
8194 NULL_TREE);
8195 break;
8196 }
8197 if (!VAR_P (t) && TREE_CODE (t) != PARM_DECL)
8198 {
8199 if (processing_template_decl && TREE_CODE (t) != OVERLOAD)
8200 break;
8201 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
8202 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
8203 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER
8204 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH))
8205 break;
8206 if (DECL_P (t))
8207 error_at (OMP_CLAUSE_LOCATION (c),
8208 "%qD is not a variable in %qs clause", t,
8209 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
8210 else
8211 error_at (OMP_CLAUSE_LOCATION (c),
8212 "%qE is not a variable in %qs clause", t,
8213 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
8214 remove = true;
8215 }
8216 else if (VAR_P (t) && CP_DECL_THREAD_LOCAL_P (t))
8217 {
8218 error_at (OMP_CLAUSE_LOCATION (c),
8219 "%qD is threadprivate variable in %qs clause", t,
8220 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
8221 remove = true;
8222 }
8223 else if (!processing_template_decl
8224 && !TYPE_REF_P (TREE_TYPE (t))
8225 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
8226 || (OMP_CLAUSE_MAP_KIND (c)
8227 != GOMP_MAP_FIRSTPRIVATE_POINTER))
8228 && !indir_component_ref_p
8229 && !cxx_mark_addressable (t))
8230 remove = true;
8231 else if (!(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
8232 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
8233 || (OMP_CLAUSE_MAP_KIND (c)
8234 == GOMP_MAP_FIRSTPRIVATE_POINTER)))
8235 && t == OMP_CLAUSE_DECL (c)
8236 && !type_dependent_expression_p (t)
8237 && !cp_omp_mappable_type (TYPE_REF_P (TREE_TYPE (t))
8238 ? TREE_TYPE (TREE_TYPE (t))
8239 : TREE_TYPE (t)))
8240 {
8241 error_at (OMP_CLAUSE_LOCATION (c),
8242 "%qD does not have a mappable type in %qs clause", t,
8243 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
8244 cp_omp_emit_unmappable_type_notes (TREE_TYPE (t));
8245 remove = true;
8246 }
8247 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
8248 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FORCE_DEVICEPTR
8249 && !type_dependent_expression_p (t)
8250 && !INDIRECT_TYPE_P (TREE_TYPE (t)))
8251 {
8252 error_at (OMP_CLAUSE_LOCATION (c),
8253 "%qD is not a pointer variable", t);
8254 remove = true;
8255 }
8256 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
8257 && OMP_CLAUSE_MAP_IMPLICIT (c)
8258 && (bitmap_bit_p (&map_head, DECL_UID (t))
8259 || bitmap_bit_p (&map_field_head, DECL_UID (t))
8260 || bitmap_bit_p (&map_firstprivate_head,
8261 DECL_UID (t))))
8262 remove = true;
8263 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
8264 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
8265 {
8266 if (bitmap_bit_p (&generic_head, DECL_UID (t))
8267 || bitmap_bit_p (&firstprivate_head, DECL_UID (t))
8268 || bitmap_bit_p (&map_firstprivate_head, DECL_UID (t)))
8269 {
8270 error_at (OMP_CLAUSE_LOCATION (c),
8271 "%qD appears more than once in data clauses", t);
8272 remove = true;
8273 }
8274 else if (bitmap_bit_p (&map_head, DECL_UID (t))
8275 && !bitmap_bit_p (&map_field_head, DECL_UID (t)))
8276 {
8277 if (ort == C_ORT_ACC)
8278 error_at (OMP_CLAUSE_LOCATION (c),
8279 "%qD appears more than once in data clauses", t);
8280 else
8281 error_at (OMP_CLAUSE_LOCATION (c),
8282 "%qD appears both in data and map clauses", t);
8283 remove = true;
8284 }
8285 else
8286 bitmap_set_bit (&map_firstprivate_head, DECL_UID (t));
8287 }
8288 else if (bitmap_bit_p (&map_head, DECL_UID (t))
8289 && !bitmap_bit_p (&map_field_head, DECL_UID (t)))
8290 {
8291 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
8292 error_at (OMP_CLAUSE_LOCATION (c),
8293 "%qD appears more than once in motion clauses", t);
8294 else if (ort == C_ORT_ACC)
8295 error_at (OMP_CLAUSE_LOCATION (c),
8296 "%qD appears more than once in data clauses", t);
8297 else
8298 error_at (OMP_CLAUSE_LOCATION (c),
8299 "%qD appears more than once in map clauses", t);
8300 remove = true;
8301 }
8302 else if (ort == C_ORT_ACC
8303 && bitmap_bit_p (&generic_head, DECL_UID (t)))
8304 {
8305 error_at (OMP_CLAUSE_LOCATION (c),
8306 "%qD appears more than once in data clauses", t);
8307 remove = true;
8308 }
8309 else if (bitmap_bit_p (&firstprivate_head, DECL_UID (t))
8310 || bitmap_bit_p (&is_on_device_head, DECL_UID (t)))
8311 {
8312 if (ort == C_ORT_ACC)
8313 error_at (OMP_CLAUSE_LOCATION (c),
8314 "%qD appears more than once in data clauses", t);
8315 else
8316 error_at (OMP_CLAUSE_LOCATION (c),
8317 "%qD appears both in data and map clauses", t);
8318 remove = true;
8319 }
8320 else
8321 {
8322 bitmap_set_bit (&map_head, DECL_UID (t));
8323
8324 tree decl = OMP_CLAUSE_DECL (c);
8325 if (t != decl
8326 && (TREE_CODE (decl) == COMPONENT_REF
8327 || (INDIRECT_REF_P (decl)
8328 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
8329 && TYPE_REF_P (TREE_TYPE (TREE_OPERAND (decl, 0))))))
8330 bitmap_set_bit (&map_field_head, DECL_UID (t));
8331 }
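	  /* For reference-typed operands outside templates and declare simd,
	     map the referenced object instead; most map kinds additionally
	     get a trailing GOMP_MAP_ALWAYS_POINTER (for component
	     references) or GOMP_MAP_FIRSTPRIVATE_REFERENCE clause for the
	     reference itself.  */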
8332 handle_map_references:
8333 if (!remove
8334 && !processing_template_decl
8335 && ort != C_ORT_DECLARE_SIMD
8336 && TYPE_REF_P (TREE_TYPE (OMP_CLAUSE_DECL (c))))
8337 {
8338 t = OMP_CLAUSE_DECL (c);
8339 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
8340 {
8341 OMP_CLAUSE_DECL (c) = build_simple_mem_ref (t);
8342 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8343 OMP_CLAUSE_SIZE (c)
8344 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (t)));
8345 }
8346 else if (OMP_CLAUSE_MAP_KIND (c)
8347 != GOMP_MAP_FIRSTPRIVATE_POINTER
8348 && (OMP_CLAUSE_MAP_KIND (c)
8349 != GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8350 && (OMP_CLAUSE_MAP_KIND (c)
8351 != GOMP_MAP_ALWAYS_POINTER)
8352 && (OMP_CLAUSE_MAP_KIND (c)
8353 != GOMP_MAP_ATTACH_DETACH))
8354 {
8355 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8356 OMP_CLAUSE_MAP);
8357 if (TREE_CODE (t) == COMPONENT_REF)
8358 OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_ALWAYS_POINTER);
8359 else
8360 OMP_CLAUSE_SET_MAP_KIND (c2,
8361 GOMP_MAP_FIRSTPRIVATE_REFERENCE);
8362 OMP_CLAUSE_DECL (c2) = t;
8363 OMP_CLAUSE_SIZE (c2) = size_zero_node;
8364 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (c);
8365 OMP_CLAUSE_CHAIN (c) = c2;
8366 OMP_CLAUSE_DECL (c) = build_simple_mem_ref (t);
8367 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8368 OMP_CLAUSE_SIZE (c)
8369 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (t)));
8370 c = c2;
8371 }
8372 }
8373 break;
8374
8375 case OMP_CLAUSE_TO_DECLARE:
8376 case OMP_CLAUSE_LINK:
8377 t = OMP_CLAUSE_DECL (c);
8378 if (TREE_CODE (t) == FUNCTION_DECL
8379 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO_DECLARE)
8380 ;
8381 else if (!VAR_P (t))
8382 {
8383 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO_DECLARE)
8384 {
8385 if (TREE_CODE (t) == TEMPLATE_ID_EXPR)
8386 error_at (OMP_CLAUSE_LOCATION (c),
8387 "template %qE in clause %qs", t,
8388 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
8389 else if (really_overloaded_fn (t))
8390 error_at (OMP_CLAUSE_LOCATION (c),
8391 "overloaded function name %qE in clause %qs", t,
8392 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
8393 else
8394 error_at (OMP_CLAUSE_LOCATION (c),
8395 "%qE is neither a variable nor a function name "
8396 "in clause %qs", t,
8397 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
8398 }
8399 else
8400 error_at (OMP_CLAUSE_LOCATION (c),
8401 "%qE is not a variable in clause %qs", t,
8402 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
8403 remove = true;
8404 }
8405 else if (DECL_THREAD_LOCAL_P (t))
8406 {
8407 error_at (OMP_CLAUSE_LOCATION (c),
8408 "%qD is threadprivate variable in %qs clause", t,
8409 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
8410 remove = true;
8411 }
8412 else if (!cp_omp_mappable_type (TREE_TYPE (t)))
8413 {
8414 error_at (OMP_CLAUSE_LOCATION (c),
8415 "%qD does not have a mappable type in %qs clause", t,
8416 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
8417 cp_omp_emit_unmappable_type_notes (TREE_TYPE (t));
8418 remove = true;
8419 }
8420 if (remove)
8421 break;
8422 if (bitmap_bit_p (&generic_head, DECL_UID (t)))
8423 {
8424 error_at (OMP_CLAUSE_LOCATION (c),
8425 "%qE appears more than once on the same "
8426 "%<declare target%> directive", t);
8427 remove = true;
8428 }
8429 else
8430 bitmap_set_bit (&generic_head, DECL_UID (t));
8431 break;
8432
8433 case OMP_CLAUSE_UNIFORM:
8434 t = OMP_CLAUSE_DECL (c);
8435 if (TREE_CODE (t) != PARM_DECL)
8436 {
8437 if (processing_template_decl)
8438 break;
8439 if (DECL_P (t))
8440 error_at (OMP_CLAUSE_LOCATION (c),
8441 "%qD is not an argument in %<uniform%> clause", t);
8442 else
8443 error_at (OMP_CLAUSE_LOCATION (c),
8444 "%qE is not an argument in %<uniform%> clause", t);
8445 remove = true;
8446 break;
8447 }
	  /* The map_head bitmap is used as uniform_head for declare simd.  */
8449 bitmap_set_bit (&map_head, DECL_UID (t));
8450 goto check_dup_generic;
8451
8452 case OMP_CLAUSE_GRAINSIZE:
8453 t = OMP_CLAUSE_GRAINSIZE_EXPR (c);
8454 if (t == error_mark_node)
8455 remove = true;
8456 else if (!type_dependent_expression_p (t)
8457 && !INTEGRAL_TYPE_P (TREE_TYPE (t)))
8458 {
8459 error_at (OMP_CLAUSE_LOCATION (c),
8460 "%<grainsize%> expression must be integral");
8461 remove = true;
8462 }
8463 else
8464 {
8465 t = mark_rvalue_use (t);
8466 if (!processing_template_decl)
8467 {
8468 t = maybe_constant_value (t);
8469 if (TREE_CODE (t) == INTEGER_CST
8470 && tree_int_cst_sgn (t) != 1)
8471 {
8472 warning_at (OMP_CLAUSE_LOCATION (c), 0,
8473 "%<grainsize%> value must be positive");
8474 t = integer_one_node;
8475 }
8476 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
8477 }
8478 OMP_CLAUSE_GRAINSIZE_EXPR (c) = t;
8479 }
8480 break;
8481
8482 case OMP_CLAUSE_PRIORITY:
8483 t = OMP_CLAUSE_PRIORITY_EXPR (c);
8484 if (t == error_mark_node)
8485 remove = true;
8486 else if (!type_dependent_expression_p (t)
8487 && !INTEGRAL_TYPE_P (TREE_TYPE (t)))
8488 {
8489 error_at (OMP_CLAUSE_LOCATION (c),
8490 "%<priority%> expression must be integral");
8491 remove = true;
8492 }
8493 else
8494 {
8495 t = mark_rvalue_use (t);
8496 if (!processing_template_decl)
8497 {
8498 t = maybe_constant_value (t);
8499 if (TREE_CODE (t) == INTEGER_CST
8500 && tree_int_cst_sgn (t) == -1)
8501 {
8502 warning_at (OMP_CLAUSE_LOCATION (c), 0,
8503 "%<priority%> value must be non-negative");
8504 t = integer_one_node;
8505 }
8506 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
8507 }
8508 OMP_CLAUSE_PRIORITY_EXPR (c) = t;
8509 }
8510 break;
8511
8512 case OMP_CLAUSE_HINT:
8513 t = OMP_CLAUSE_HINT_EXPR (c);
8514 if (t == error_mark_node)
8515 remove = true;
8516 else if (!type_dependent_expression_p (t)
8517 && !INTEGRAL_TYPE_P (TREE_TYPE (t)))
8518 {
8519 error_at (OMP_CLAUSE_LOCATION (c),
8520 "%<hint%> expression must be integral");
8521 remove = true;
8522 }
8523 else
8524 {
8525 t = mark_rvalue_use (t);
8526 if (!processing_template_decl)
8527 {
8528 t = maybe_constant_value (t);
8529 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
8530 if (TREE_CODE (t) != INTEGER_CST)
8531 {
8532 error_at (OMP_CLAUSE_LOCATION (c),
8533 "%<hint%> expression must be constant integer "
8534 "expression");
8535 remove = true;
8536 }
8537 }
8538 OMP_CLAUSE_HINT_EXPR (c) = t;
8539 }
8540 break;
8541
8542 case OMP_CLAUSE_FILTER:
8543 t = OMP_CLAUSE_FILTER_EXPR (c);
8544 if (t == error_mark_node)
8545 remove = true;
8546 else if (!type_dependent_expression_p (t)
8547 && !INTEGRAL_TYPE_P (TREE_TYPE (t)))
8548 {
8549 error_at (OMP_CLAUSE_LOCATION (c),
8550 "%<filter%> expression must be integral");
8551 remove = true;
8552 }
8553 else
8554 {
8555 t = mark_rvalue_use (t);
8556 if (!processing_template_decl)
8557 {
8558 t = maybe_constant_value (t);
8559 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
8560 }
8561 OMP_CLAUSE_FILTER_EXPR (c) = t;
8562 }
8563 break;
8564
8565 case OMP_CLAUSE_IS_DEVICE_PTR:
8566 case OMP_CLAUSE_USE_DEVICE_PTR:
8567 field_ok = (ort & C_ORT_OMP_DECLARE_SIMD) == C_ORT_OMP;
8568 t = OMP_CLAUSE_DECL (c);
8569 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
8570 bitmap_set_bit (&is_on_device_head, DECL_UID (t));
8571 if (!type_dependent_expression_p (t))
8572 {
8573 tree type = TREE_TYPE (t);
8574 if (!TYPE_PTR_P (type)
8575 && (!TYPE_REF_P (type) || !TYPE_PTR_P (TREE_TYPE (type))))
8576 {
8577 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR
8578 && ort == C_ORT_OMP)
8579 {
8580 error_at (OMP_CLAUSE_LOCATION (c),
8581 "%qs variable is neither a pointer "
8582 "nor reference to pointer",
8583 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
8584 remove = true;
8585 }
8586 else if (TREE_CODE (type) != ARRAY_TYPE
8587 && (!TYPE_REF_P (type)
8588 || TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
8589 {
8590 error_at (OMP_CLAUSE_LOCATION (c),
8591 "%qs variable is neither a pointer, nor an "
8592 "array nor reference to pointer or array",
8593 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
8594 remove = true;
8595 }
8596 }
8597 }
8598 goto check_dup_generic;
8599
8600 case OMP_CLAUSE_HAS_DEVICE_ADDR:
8601 t = OMP_CLAUSE_DECL (c);
8602 if (TREE_CODE (t) == TREE_LIST)
8603 {
8604 if (handle_omp_array_sections (c, ort))
8605 remove = true;
8606 else
8607 {
8608 t = OMP_CLAUSE_DECL (c);
8609 while (TREE_CODE (t) == INDIRECT_REF
8610 || TREE_CODE (t) == ARRAY_REF)
8611 t = TREE_OPERAND (t, 0);
8612 }
8613 }
8614 bitmap_set_bit (&is_on_device_head, DECL_UID (t));
8615 if (VAR_P (t) || TREE_CODE (t) == PARM_DECL)
8616 cxx_mark_addressable (t);
8617 goto check_dup_generic_t;
8618
8619 case OMP_CLAUSE_USE_DEVICE_ADDR:
8620 field_ok = true;
8621 t = OMP_CLAUSE_DECL (c);
8622 if (!processing_template_decl
8623 && (VAR_P (t) || TREE_CODE (t) == PARM_DECL)
8624 && !TYPE_REF_P (TREE_TYPE (t))
8625 && !cxx_mark_addressable (t))
8626 remove = true;
8627 goto check_dup_generic;
8628
8629 case OMP_CLAUSE_NOWAIT:
8630 case OMP_CLAUSE_DEFAULT:
8631 case OMP_CLAUSE_UNTIED:
8632 case OMP_CLAUSE_COLLAPSE:
8633 case OMP_CLAUSE_PARALLEL:
8634 case OMP_CLAUSE_FOR:
8635 case OMP_CLAUSE_SECTIONS:
8636 case OMP_CLAUSE_TASKGROUP:
8637 case OMP_CLAUSE_PROC_BIND:
8638 case OMP_CLAUSE_DEVICE_TYPE:
8639 case OMP_CLAUSE_NOGROUP:
8640 case OMP_CLAUSE_THREADS:
8641 case OMP_CLAUSE_SIMD:
8642 case OMP_CLAUSE_DEFAULTMAP:
8643 case OMP_CLAUSE_BIND:
8644 case OMP_CLAUSE_AUTO:
8645 case OMP_CLAUSE_INDEPENDENT:
8646 case OMP_CLAUSE_SEQ:
8647 case OMP_CLAUSE_IF_PRESENT:
8648 case OMP_CLAUSE_FINALIZE:
8649 case OMP_CLAUSE_NOHOST:
8650 break;
8651
8652 case OMP_CLAUSE_MERGEABLE:
8653 mergeable_seen = true;
8654 break;
8655
8656 case OMP_CLAUSE_TILE:
8657 for (tree list = OMP_CLAUSE_TILE_LIST (c); !remove && list;
8658 list = TREE_CHAIN (list))
8659 {
8660 t = TREE_VALUE (list);
8661
8662 if (t == error_mark_node)
8663 remove = true;
8664 else if (!type_dependent_expression_p (t)
8665 && !INTEGRAL_TYPE_P (TREE_TYPE (t)))
8666 {
8667 error_at (OMP_CLAUSE_LOCATION (c),
8668 "%<tile%> argument needs integral type");
8669 remove = true;
8670 }
8671 else
8672 {
8673 t = mark_rvalue_use (t);
8674 if (!processing_template_decl)
8675 {
		      /* Zero is used to indicate '*'; we permit getting
			 there via an integral constant expression of value
			 zero.  */
8678 t = maybe_constant_value (t);
8679 if (!tree_fits_shwi_p (t)
8680 || tree_to_shwi (t) < 0)
8681 {
8682 error_at (OMP_CLAUSE_LOCATION (c),
8683 "%<tile%> argument needs positive "
8684 "integral constant");
8685 remove = true;
8686 }
8687 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
8688 }
8689 }
8690
8691 /* Update list item. */
8692 TREE_VALUE (list) = t;
8693 }
8694 break;
8695
8696 case OMP_CLAUSE_ORDERED:
8697 ordered_seen = true;
8698 break;
8699
8700 case OMP_CLAUSE_ORDER:
8701 if (order_seen)
8702 remove = true;
8703 else
8704 order_seen = true;
8705 break;
8706
8707 case OMP_CLAUSE_INBRANCH:
8708 case OMP_CLAUSE_NOTINBRANCH:
8709 if (branch_seen)
8710 {
8711 error_at (OMP_CLAUSE_LOCATION (c),
8712 "%<inbranch%> clause is incompatible with "
8713 "%<notinbranch%>");
8714 remove = true;
8715 }
8716 branch_seen = true;
8717 break;
8718
8719 case OMP_CLAUSE_INCLUSIVE:
8720 case OMP_CLAUSE_EXCLUSIVE:
8721 t = omp_clause_decl_field (OMP_CLAUSE_DECL (c));
8722 if (!t)
8723 t = OMP_CLAUSE_DECL (c);
8724 if (t == current_class_ptr)
8725 {
8726 error_at (OMP_CLAUSE_LOCATION (c),
8727 "%<this%> allowed in OpenMP only in %<declare simd%>"
8728 " clauses");
8729 remove = true;
8730 break;
8731 }
8732 if (!VAR_P (t)
8733 && TREE_CODE (t) != PARM_DECL
8734 && TREE_CODE (t) != FIELD_DECL)
8735 {
8736 if (processing_template_decl && TREE_CODE (t) != OVERLOAD)
8737 break;
8738 if (DECL_P (t))
8739 error_at (OMP_CLAUSE_LOCATION (c),
8740 "%qD is not a variable in clause %qs", t,
8741 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
8742 else
8743 error_at (OMP_CLAUSE_LOCATION (c),
8744 "%qE is not a variable in clause %qs", t,
8745 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
8746 remove = true;
8747 }
8748 break;
8749
8750 default:
8751 gcc_unreachable ();
8752 }
8753
8754 if (remove)
8755 *pc = OMP_CLAUSE_CHAIN (c);
8756 else
8757 pc = &OMP_CLAUSE_CHAIN (c);
8758 }
8759
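  /* An %<inscan%> reduction cannot be combined with %<ordered%> or
     %<schedule%>; flag that like mixed inscan/normal reductions so the
     second pass diagnoses it and clears the inscan bit.  */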
8760 if (reduction_seen < 0 && (ordered_seen || schedule_seen))
8761 reduction_seen = -2;
8762
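  /* Second pass: work out which special member functions (default/copy
     constructor, copy assignment, destructor) each remaining clause needs,
     and diagnose invalid clause combinations.  */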
8763 for (pc = &clauses, c = clauses; c ; c = *pc)
8764 {
8765 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
8766 bool remove = false;
8767 bool need_complete_type = false;
8768 bool need_default_ctor = false;
8769 bool need_copy_ctor = false;
8770 bool need_copy_assignment = false;
8771 bool need_implicitly_determined = false;
8772 bool need_dtor = false;
8773 tree type, inner_type;
8774
8775 switch (c_kind)
8776 {
8777 case OMP_CLAUSE_SHARED:
8778 need_implicitly_determined = true;
8779 break;
8780 case OMP_CLAUSE_PRIVATE:
8781 need_complete_type = true;
8782 need_default_ctor = true;
8783 need_dtor = true;
8784 need_implicitly_determined = true;
8785 break;
8786 case OMP_CLAUSE_FIRSTPRIVATE:
8787 need_complete_type = true;
8788 need_copy_ctor = true;
8789 need_dtor = true;
8790 need_implicitly_determined = true;
8791 break;
8792 case OMP_CLAUSE_LASTPRIVATE:
8793 need_complete_type = true;
8794 need_copy_assignment = true;
8795 need_implicitly_determined = true;
8796 break;
8797 case OMP_CLAUSE_REDUCTION:
8798 if (reduction_seen == -2)
8799 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
8800 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
8801 need_copy_assignment = true;
8802 need_implicitly_determined = true;
8803 break;
8804 case OMP_CLAUSE_IN_REDUCTION:
8805 case OMP_CLAUSE_TASK_REDUCTION:
8806 case OMP_CLAUSE_INCLUSIVE:
8807 case OMP_CLAUSE_EXCLUSIVE:
8808 need_implicitly_determined = true;
8809 break;
8810 case OMP_CLAUSE_LINEAR:
8811 if (ort != C_ORT_OMP_DECLARE_SIMD)
8812 need_implicitly_determined = true;
8813 else if (OMP_CLAUSE_LINEAR_VARIABLE_STRIDE (c)
8814 && !bitmap_bit_p (&map_head,
8815 DECL_UID (OMP_CLAUSE_LINEAR_STEP (c))))
8816 {
8817 error_at (OMP_CLAUSE_LOCATION (c),
8818 "%<linear%> clause step is a parameter %qD not "
8819 "specified in %<uniform%> clause",
8820 OMP_CLAUSE_LINEAR_STEP (c));
8821 *pc = OMP_CLAUSE_CHAIN (c);
8822 continue;
8823 }
8824 break;
8825 case OMP_CLAUSE_COPYPRIVATE:
8826 need_copy_assignment = true;
8827 break;
8828 case OMP_CLAUSE_COPYIN:
8829 need_copy_assignment = true;
8830 break;
8831 case OMP_CLAUSE_SIMDLEN:
8832 if (safelen
8833 && !processing_template_decl
8834 && tree_int_cst_lt (OMP_CLAUSE_SAFELEN_EXPR (safelen),
8835 OMP_CLAUSE_SIMDLEN_EXPR (c)))
8836 {
8837 error_at (OMP_CLAUSE_LOCATION (c),
8838 "%<simdlen%> clause value is bigger than "
8839 "%<safelen%> clause value");
8840 OMP_CLAUSE_SIMDLEN_EXPR (c)
8841 = OMP_CLAUSE_SAFELEN_EXPR (safelen);
8842 }
8843 pc = &OMP_CLAUSE_CHAIN (c);
8844 continue;
8845 case OMP_CLAUSE_SCHEDULE:
8846 if (ordered_seen
8847 && (OMP_CLAUSE_SCHEDULE_KIND (c)
8848 & OMP_CLAUSE_SCHEDULE_NONMONOTONIC))
8849 {
8850 error_at (OMP_CLAUSE_LOCATION (c),
8851 "%<nonmonotonic%> schedule modifier specified "
8852 "together with %<ordered%> clause");
8853 OMP_CLAUSE_SCHEDULE_KIND (c)
8854 = (enum omp_clause_schedule_kind)
8855 (OMP_CLAUSE_SCHEDULE_KIND (c)
8856 & ~OMP_CLAUSE_SCHEDULE_NONMONOTONIC);
8857 }
8858 if (reduction_seen == -2)
8859 error_at (OMP_CLAUSE_LOCATION (c),
8860 "%qs clause specified together with %<inscan%> "
8861 "%<reduction%> clause", "schedule");
8862 pc = &OMP_CLAUSE_CHAIN (c);
8863 continue;
8864 case OMP_CLAUSE_NOGROUP:
8865 if (reduction_seen)
8866 {
8867 error_at (OMP_CLAUSE_LOCATION (c),
8868 "%<nogroup%> clause must not be used together with "
8869 "%<reduction%> clause");
8870 *pc = OMP_CLAUSE_CHAIN (c);
8871 continue;
8872 }
8873 pc = &OMP_CLAUSE_CHAIN (c);
8874 continue;
8875 case OMP_CLAUSE_ORDERED:
8876 if (reduction_seen == -2)
8877 error_at (OMP_CLAUSE_LOCATION (c),
8878 "%qs clause specified together with %<inscan%> "
8879 "%<reduction%> clause", "ordered");
8880 pc = &OMP_CLAUSE_CHAIN (c);
8881 continue;
8882 case OMP_CLAUSE_ORDER:
8883 if (ordered_seen)
8884 {
8885 error_at (OMP_CLAUSE_LOCATION (c),
8886 "%<order%> clause must not be used together "
8887 "with %<ordered%>");
8888 *pc = OMP_CLAUSE_CHAIN (c);
8889 continue;
8890 }
8891 pc = &OMP_CLAUSE_CHAIN (c);
8892 continue;
8893 case OMP_CLAUSE_DETACH:
8894 if (mergeable_seen)
8895 {
8896 error_at (OMP_CLAUSE_LOCATION (c),
8897 "%<detach%> clause must not be used together with "
8898 "%<mergeable%> clause");
8899 *pc = OMP_CLAUSE_CHAIN (c);
8900 continue;
8901 }
8902 pc = &OMP_CLAUSE_CHAIN (c);
8903 continue;
8904 case OMP_CLAUSE_MAP:
8905 if (target_in_reduction_seen && !processing_template_decl)
8906 {
8907 t = OMP_CLAUSE_DECL (c);
8908 while (handled_component_p (t)
8909 || TREE_CODE (t) == INDIRECT_REF
8910 || TREE_CODE (t) == ADDR_EXPR
8911 || TREE_CODE (t) == MEM_REF
8912 || TREE_CODE (t) == NON_LVALUE_EXPR)
8913 t = TREE_OPERAND (t, 0);
8914 if (DECL_P (t)
8915 && bitmap_bit_p (&oacc_reduction_head, DECL_UID (t)))
8916 OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
8917 }
8918 pc = &OMP_CLAUSE_CHAIN (c);
8919 continue;
8920 case OMP_CLAUSE_NOWAIT:
8921 if (copyprivate_seen)
8922 {
8923 error_at (OMP_CLAUSE_LOCATION (c),
8924 "%<nowait%> clause must not be used together "
8925 "with %<copyprivate%>");
8926 *pc = OMP_CLAUSE_CHAIN (c);
8927 continue;
8928 }
8929 /* FALLTHRU */
8930 default:
8931 pc = &OMP_CLAUSE_CHAIN (c);
8932 continue;
8933 }
8934
8935 t = OMP_CLAUSE_DECL (c);
8936 switch (c_kind)
8937 {
8938 case OMP_CLAUSE_LASTPRIVATE:
8939 if (DECL_P (t)
8940 && !bitmap_bit_p (&firstprivate_head, DECL_UID (t)))
8941 {
8942 need_default_ctor = true;
8943 need_dtor = true;
8944 }
8945 break;
8946
8947 case OMP_CLAUSE_REDUCTION:
8948 case OMP_CLAUSE_IN_REDUCTION:
8949 case OMP_CLAUSE_TASK_REDUCTION:
8950 if (allocate_seen)
8951 {
8952 if (TREE_CODE (t) == MEM_REF)
8953 {
8954 t = TREE_OPERAND (t, 0);
8955 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
8956 t = TREE_OPERAND (t, 0);
8957 if (TREE_CODE (t) == ADDR_EXPR
8958 || TREE_CODE (t) == INDIRECT_REF)
8959 t = TREE_OPERAND (t, 0);
8960 if (DECL_P (t))
8961 bitmap_clear_bit (&aligned_head, DECL_UID (t));
8962 }
8963 else if (TREE_CODE (t) == TREE_LIST)
8964 {
8965 while (TREE_CODE (t) == TREE_LIST)
8966 t = TREE_CHAIN (t);
8967 if (DECL_P (t))
8968 bitmap_clear_bit (&aligned_head, DECL_UID (t));
8969 t = OMP_CLAUSE_DECL (c);
8970 }
8971 else if (DECL_P (t))
8972 bitmap_clear_bit (&aligned_head, DECL_UID (t));
8973 t = OMP_CLAUSE_DECL (c);
8974 }
8975 if (processing_template_decl
8976 && !VAR_P (t) && TREE_CODE (t) != PARM_DECL)
8977 break;
8978 if (finish_omp_reduction_clause (c, &need_default_ctor,
8979 &need_dtor))
8980 remove = true;
8981 else
8982 t = OMP_CLAUSE_DECL (c);
8983 break;
8984
8985 case OMP_CLAUSE_COPYIN:
8986 if (processing_template_decl
8987 && !VAR_P (t) && TREE_CODE (t) != PARM_DECL)
8988 break;
8989 if (!VAR_P (t) || !CP_DECL_THREAD_LOCAL_P (t))
8990 {
8991 error_at (OMP_CLAUSE_LOCATION (c),
8992 "%qE must be %<threadprivate%> for %<copyin%>", t);
8993 remove = true;
8994 }
8995 break;
8996
8997 default:
8998 break;
8999 }
9000
9001 if (processing_template_decl
9002 && !VAR_P (t) && TREE_CODE (t) != PARM_DECL)
9003 {
9004 pc = &OMP_CLAUSE_CHAIN (c);
9005 continue;
9006 }
9007
9008 if (need_complete_type || need_copy_assignment)
9009 {
9010 t = require_complete_type (t);
9011 if (t == error_mark_node)
9012 remove = true;
9013 else if (!processing_template_decl
9014 && TYPE_REF_P (TREE_TYPE (t))
9015 && !complete_type_or_else (TREE_TYPE (TREE_TYPE (t)), t))
9016 remove = true;
9017 }
9018 if (need_implicitly_determined)
9019 {
9020 const char *share_name = NULL;
9021
9022 if (allocate_seen
9023 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_SHARED
9024 && DECL_P (t))
9025 bitmap_clear_bit (&aligned_head, DECL_UID (t));
9026
9027 if (VAR_P (t) && CP_DECL_THREAD_LOCAL_P (t))
9028 share_name = "threadprivate";
9029 else switch (cxx_omp_predetermined_sharing_1 (t))
9030 {
9031 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
9032 break;
9033 case OMP_CLAUSE_DEFAULT_SHARED:
9034 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
9035 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
9036 && c_omp_predefined_variable (t))
9037 /* The __func__ variable and similar function-local predefined
9038 variables may be listed in a shared or firstprivate
9039 clause. */
9040 break;
9041 if (VAR_P (t)
9042 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
9043 && TREE_STATIC (t)
9044 && cxx_omp_const_qual_no_mutable (t))
9045 {
9046 tree ctx = CP_DECL_CONTEXT (t);
9047 /* const qualified static data members without mutable
9048 member may be specified in firstprivate clause. */
9049 if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
9050 break;
9051 }
9052 share_name = "shared";
9053 break;
9054 case OMP_CLAUSE_DEFAULT_PRIVATE:
9055 share_name = "private";
9056 break;
9057 default:
9058 gcc_unreachable ();
9059 }
9060 if (share_name)
9061 {
9062 error_at (OMP_CLAUSE_LOCATION (c),
9063 "%qE is predetermined %qs for %qs",
9064 omp_clause_printable_decl (t), share_name,
9065 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
9066 remove = true;
9067 }
9068 else if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_SHARED
9069 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE
9070 && cxx_omp_const_qual_no_mutable (t))
9071 {
9072 error_at (OMP_CLAUSE_LOCATION (c),
9073 "%<const%> qualified %qE without %<mutable%> member "
9074 "may appear only in %<shared%> or %<firstprivate%> "
9075 "clauses", omp_clause_printable_decl (t));
9076 remove = true;
9077 }
9078 }
9079
9080 if (detach_seen
9081 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
9082 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
9083 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
9084 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
9085 && OMP_CLAUSE_DECL (c) == OMP_CLAUSE_DECL (detach_seen))
9086 {
9087 error_at (OMP_CLAUSE_LOCATION (c),
9088 "the event handle of a %<detach%> clause "
9089 "should not be in a data-sharing clause");
9090 remove = true;
9091 }
9092
9093 /* We're interested in the base element, not arrays. */
9094 inner_type = type = TREE_TYPE (t);
9095 if ((need_complete_type
9096 || need_copy_assignment
9097 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9098 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
9099 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
9100 && TYPE_REF_P (inner_type))
9101 inner_type = TREE_TYPE (inner_type);
9102 while (TREE_CODE (inner_type) == ARRAY_TYPE)
9103 inner_type = TREE_TYPE (inner_type);
9104
9105 /* Check for special function availability by building a call to one.
9106 Save the results, because later we won't be in the right context
9107 for making these queries. */
9108 if (CLASS_TYPE_P (inner_type)
9109 && COMPLETE_TYPE_P (inner_type)
9110 && (need_default_ctor || need_copy_ctor
9111 || need_copy_assignment || need_dtor)
9112 && !type_dependent_expression_p (t)
9113 && cxx_omp_create_clause_info (c, inner_type, need_default_ctor,
9114 need_copy_ctor, need_copy_assignment,
9115 need_dtor))
9116 remove = true;
9117
9118 if (!remove
9119 && c_kind == OMP_CLAUSE_SHARED
9120 && processing_template_decl)
9121 {
9122 t = omp_clause_decl_field (OMP_CLAUSE_DECL (c));
9123 if (t)
9124 OMP_CLAUSE_DECL (c) = t;
9125 }
9126
9127 if (remove)
9128 *pc = OMP_CLAUSE_CHAIN (c);
9129 else
9130 pc = &OMP_CLAUSE_CHAIN (c);
9131 }
9132
9133 if (allocate_seen)
9134 for (pc = &clauses, c = clauses; c ; c = *pc)
9135 {
9136 bool remove = false;
9137 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE
9138 && !OMP_CLAUSE_ALLOCATE_COMBINED (c)
9139 && DECL_P (OMP_CLAUSE_DECL (c))
9140 && bitmap_bit_p (&aligned_head, DECL_UID (OMP_CLAUSE_DECL (c))))
9141 {
9142 error_at (OMP_CLAUSE_LOCATION (c),
9143 "%qD specified in %<allocate%> clause but not in "
9144 "an explicit privatization clause", OMP_CLAUSE_DECL (c));
9145 remove = true;
9146 }
9147 if (remove)
9148 *pc = OMP_CLAUSE_CHAIN (c);
9149 else
9150 pc = &OMP_CLAUSE_CHAIN (c);
9151 }
9152
9153 bitmap_obstack_release (NULL);
9154 return clauses;
9155 }
9156
9157 /* Start processing OpenMP clauses that can include any
9158 privatization clauses for non-static data members. */
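/* A minimal illustrative example (hypothetical user code, not from GCC):
   inside a member function a non-static data member may appear in a
   data-sharing clause and is privatized as if spelled 'this->n':

     struct S {
       int n;
       void f () {
       #pragma omp parallel for private (n)
	 for (int i = 0; i < 10; i++)
	   n = i;
       }
     };  */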
9159
9160 tree
9161 push_omp_privatization_clauses (bool ignore_next)
9162 {
9163 if (omp_private_member_ignore_next)
9164 {
9165 omp_private_member_ignore_next = ignore_next;
9166 return NULL_TREE;
9167 }
9168 omp_private_member_ignore_next = ignore_next;
9169 if (omp_private_member_map)
9170 omp_private_member_vec.safe_push (error_mark_node);
9171 return push_stmt_list ();
9172 }
9173
9174 /* Revert remapping of any non-static data members since
9175 the last push_omp_privatization_clauses () call. */
9176
9177 void
9178 pop_omp_privatization_clauses (tree stmt)
9179 {
9180 if (stmt == NULL_TREE)
9181 return;
9182 stmt = pop_stmt_list (stmt);
9183 if (omp_private_member_map)
9184 {
9185 while (!omp_private_member_vec.is_empty ())
9186 {
9187 tree t = omp_private_member_vec.pop ();
9188 if (t == error_mark_node)
9189 {
9190 add_stmt (stmt);
9191 return;
9192 }
9193 bool no_decl_expr = t == integer_zero_node;
9194 if (no_decl_expr)
9195 t = omp_private_member_vec.pop ();
9196 tree *v = omp_private_member_map->get (t);
9197 gcc_assert (v);
9198 if (!no_decl_expr)
9199 add_decl_expr (*v);
9200 omp_private_member_map->remove (t);
9201 }
9202 delete omp_private_member_map;
9203 omp_private_member_map = NULL;
9204 }
9205 add_stmt (stmt);
9206 }
9207
9208 /* Remember OpenMP privatization clauses mapping and clear it.
9209 Used for lambdas. */
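/* An illustrative sketch (hypothetical user code): when a lambda body is
   compiled while such a member-privatization mapping is live, the mapping
   of the enclosing member function is saved here and restored afterwards
   so that it does not leak into the lambda:

     struct S {
       int n;
       void f () {
       #pragma omp parallel private (n)
	 {
	   auto g = [] (int x) { return x + 1; };
	   n = g (n);
	 }
       }
     };  */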
9210
9211 void
9212 save_omp_privatization_clauses (vec<tree> &save)
9213 {
9214 save = vNULL;
9215 if (omp_private_member_ignore_next)
9216 save.safe_push (integer_one_node);
9217 omp_private_member_ignore_next = false;
9218 if (!omp_private_member_map)
9219 return;
9220
9221 while (!omp_private_member_vec.is_empty ())
9222 {
9223 tree t = omp_private_member_vec.pop ();
9224 if (t == error_mark_node)
9225 {
9226 save.safe_push (t);
9227 continue;
9228 }
9229 tree n = t;
9230 if (t == integer_zero_node)
9231 t = omp_private_member_vec.pop ();
9232 tree *v = omp_private_member_map->get (t);
9233 gcc_assert (v);
9234 save.safe_push (*v);
9235 save.safe_push (t);
9236 if (n != t)
9237 save.safe_push (n);
9238 }
9239 delete omp_private_member_map;
9240 omp_private_member_map = NULL;
9241 }
9242
9243 /* Restore OpenMP privatization clauses mapping saved by the
9244 above function. */
9245
9246 void
9247 restore_omp_privatization_clauses (vec<tree> &save)
9248 {
9249 gcc_assert (omp_private_member_vec.is_empty ());
9250 omp_private_member_ignore_next = false;
9251 if (save.is_empty ())
9252 return;
9253 if (save.length () == 1 && save[0] == integer_one_node)
9254 {
9255 omp_private_member_ignore_next = true;
9256 save.release ();
9257 return;
9258 }
9259
9260 omp_private_member_map = new hash_map <tree, tree>;
9261 while (!save.is_empty ())
9262 {
9263 tree t = save.pop ();
9264 tree n = t;
9265 if (t != error_mark_node)
9266 {
9267 if (t == integer_one_node)
9268 {
9269 omp_private_member_ignore_next = true;
9270 gcc_assert (save.is_empty ());
9271 break;
9272 }
9273 if (t == integer_zero_node)
9274 t = save.pop ();
9275 tree &v = omp_private_member_map->get_or_insert (t);
9276 v = save.pop ();
9277 }
9278 omp_private_member_vec.safe_push (t);
9279 if (n != t)
9280 omp_private_member_vec.safe_push (n);
9281 }
9282 save.release ();
9283 }
9284
9285 /* For all variables in the tree_list VARS, mark them as thread local. */
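/* An illustrative example (hypothetical user code) of the directive handled
   here; each thread gets its own copy of 'counter':

     int counter;
     #pragma omp threadprivate (counter)  */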
9286
9287 void
9288 finish_omp_threadprivate (tree vars)
9289 {
9290 tree t;
9291
9292 /* Mark every variable in VARS to be assigned thread local storage. */
9293 for (t = vars; t; t = TREE_CHAIN (t))
9294 {
9295 tree v = TREE_PURPOSE (t);
9296
9297 if (error_operand_p (v))
9298 ;
9299 else if (!VAR_P (v))
9300 error ("%<threadprivate%> %qD is not file, namespace "
9301 "or block scope variable", v);
9302 /* If V had already been marked threadprivate, it doesn't matter
9303 whether it had been used prior to this point. */
9304 else if (TREE_USED (v)
9305 && (DECL_LANG_SPECIFIC (v) == NULL
9306 || !CP_DECL_THREADPRIVATE_P (v)))
9307 error ("%qE declared %<threadprivate%> after first use", v);
9308 else if (! TREE_STATIC (v) && ! DECL_EXTERNAL (v))
9309 error ("automatic variable %qE cannot be %<threadprivate%>", v);
9310 else if (! COMPLETE_TYPE_P (complete_type (TREE_TYPE (v))))
9311 error ("%<threadprivate%> %qE has incomplete type", v);
9312 else if (TREE_STATIC (v) && TYPE_P (CP_DECL_CONTEXT (v))
9313 && CP_DECL_CONTEXT (v) != current_class_type)
9314 error ("%<threadprivate%> %qE directive not "
9315 "in %qT definition", v, CP_DECL_CONTEXT (v));
9316 else
9317 {
9318 /* Allocate a LANG_SPECIFIC structure for V, if needed. */
9319 if (DECL_LANG_SPECIFIC (v) == NULL)
9320 retrofit_lang_decl (v);
9321
9322 if (! CP_DECL_THREAD_LOCAL_P (v))
9323 {
9324 CP_DECL_THREAD_LOCAL_P (v) = true;
9325 set_decl_tls_model (v, decl_default_tls_model (v));
9326 /* If rtl has been already set for this var, call
9327 make_decl_rtl once again, so that encode_section_info
9328 has a chance to look at the new decl flags. */
9329 if (DECL_RTL_SET_P (v))
9330 make_decl_rtl (v);
9331 }
9332 CP_DECL_THREADPRIVATE_P (v) = 1;
9333 }
9334 }
9335 }
9336
9337 /* Build an OpenMP structured block. */
9338
9339 tree
9340 begin_omp_structured_block (void)
9341 {
9342 return do_pushlevel (sk_omp);
9343 }
9344
9345 tree
9346 finish_omp_structured_block (tree block)
9347 {
9348 return do_poplevel (block);
9349 }
9350
9351 /* Similarly, except force the retention of the BLOCK. */
9352
9353 tree
9354 begin_omp_parallel (void)
9355 {
9356 keep_next_level (true);
9357 return begin_omp_structured_block ();
9358 }
9359
9360 /* Generate OACC_DATA, with CLAUSES and BLOCK as its compound
9361 statement. */
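/* E.g. (illustrative only; 'a' and 'n' are hypothetical) the OpenACC
   construct this builds a tree node for:

     #pragma acc data copy (a[0:n])
     {
       ...
     }  */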
9362
9363 tree
9364 finish_oacc_data (tree clauses, tree block)
9365 {
9366 tree stmt;
9367
9368 block = finish_omp_structured_block (block);
9369
9370 stmt = make_node (OACC_DATA);
9371 TREE_TYPE (stmt) = void_type_node;
9372 OACC_DATA_CLAUSES (stmt) = clauses;
9373 OACC_DATA_BODY (stmt) = block;
9374
9375 return add_stmt (stmt);
9376 }
9377
9378 /* Generate OACC_HOST_DATA, with CLAUSES and BLOCK as its compound
9379 statement. */
9380
9381 tree
9382 finish_oacc_host_data (tree clauses, tree block)
9383 {
9384 tree stmt;
9385
9386 block = finish_omp_structured_block (block);
9387
9388 stmt = make_node (OACC_HOST_DATA);
9389 TREE_TYPE (stmt) = void_type_node;
9390 OACC_HOST_DATA_CLAUSES (stmt) = clauses;
9391 OACC_HOST_DATA_BODY (stmt) = block;
9392
9393 return add_stmt (stmt);
9394 }
9395
9396 /* Generate OMP construct CODE, with BODY and CLAUSES as its compound
9397 statement. */
9398
9399 tree
9400 finish_omp_construct (enum tree_code code, tree body, tree clauses)
9401 {
9402 body = finish_omp_structured_block (body);
9403
9404 tree stmt = make_node (code);
9405 TREE_TYPE (stmt) = void_type_node;
9406 OMP_BODY (stmt) = body;
9407 OMP_CLAUSES (stmt) = clauses;
9408
9409 return add_stmt (stmt);
9410 }
9411
9412 /* Used to walk OpenMP target directive body. */
9413
9414 struct omp_target_walk_data
9415 {
9416 /* Holds the 'this' expression found in current function. */
9417 tree current_object;
9418
9419 /* True if the 'this' expression was accessed in the target body. */
9420 bool this_expr_accessed;
9421
9422 /* For non-static functions, record which pointer-typed members were
9423 accessed, and the whole expression. */
9424 hash_map<tree, tree> ptr_members_accessed;
9425
9426 /* Record which lambda objects were accessed in target body. */
9427 hash_set<tree> lambda_objects_accessed;
9428
9429 /* For lambda functions, the __closure object expression of the current
9430 function, and the set of captured variables accessed in target body. */
9431 tree current_closure;
9432 hash_set<tree> closure_vars_accessed;
9433
9434 /* Local variables declared inside a BIND_EXPR, used to filter out such
9435 variables when recording lambda_objects_accessed. */
9436 hash_set<tree> local_decls;
9437 };
9438
9439 /* Helper function of finish_omp_target_clauses, called via
9440 cp_walk_tree_without_duplicates. Traverse body of OpenMP target
9441 directive *TP, and fill out omp_target_walk_data passed in *PTR. */
9442
9443 static tree
9444 finish_omp_target_clauses_r (tree *tp, int *walk_subtrees, void *ptr)
9445 {
9446 tree t = *tp;
9447 struct omp_target_walk_data *data = (struct omp_target_walk_data *) ptr;
9448 tree current_object = data->current_object;
9449 tree current_closure = data->current_closure;
9450
9451 /* References inside of these expression codes shouldn't incur any
9452 form of mapping, so return early. */
9453 if (TREE_CODE (t) == SIZEOF_EXPR
9454 || TREE_CODE (t) == ALIGNOF_EXPR)
9455 {
9456 *walk_subtrees = 0;
9457 return NULL_TREE;
9458 }
9459
9460 if (TREE_CODE (t) == OMP_CLAUSE)
9461 return NULL_TREE;
9462
9463 if (current_object)
9464 {
9465 tree this_expr = TREE_OPERAND (current_object, 0);
9466
9467 if (operand_equal_p (t, this_expr))
9468 {
9469 data->this_expr_accessed = true;
9470 *walk_subtrees = 0;
9471 return NULL_TREE;
9472 }
9473
9474 if (TREE_CODE (t) == COMPONENT_REF
9475 && POINTER_TYPE_P (TREE_TYPE (t))
9476 && operand_equal_p (TREE_OPERAND (t, 0), current_object)
9477 && TREE_CODE (TREE_OPERAND (t, 1)) == FIELD_DECL)
9478 {
9479 data->this_expr_accessed = true;
9480 tree fld = TREE_OPERAND (t, 1);
9481 if (data->ptr_members_accessed.get (fld) == NULL)
9482 {
9483 if (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE)
9484 t = convert_from_reference (t);
9485 data->ptr_members_accessed.put (fld, t);
9486 }
9487 *walk_subtrees = 0;
9488 return NULL_TREE;
9489 }
9490 }
9491
9492 /* When the current_function_decl is a lambda function, the closure object
9493 argument's type does not seem to have its fields laid out yet, so recording
9494 DECL_VALUE_EXPRs during the target body walk seems to be the only way to
9495 find them. */
9496 if (current_closure
9497 && (TREE_CODE (t) == VAR_DECL
9498 || TREE_CODE (t) == PARM_DECL
9499 || TREE_CODE (t) == RESULT_DECL)
9500 && DECL_HAS_VALUE_EXPR_P (t)
9501 && TREE_CODE (DECL_VALUE_EXPR (t)) == COMPONENT_REF
9502 && operand_equal_p (current_closure,
9503 TREE_OPERAND (DECL_VALUE_EXPR (t), 0)))
9504 {
9505 if (!data->closure_vars_accessed.contains (t))
9506 data->closure_vars_accessed.add (t);
9507 *walk_subtrees = 0;
9508 return NULL_TREE;
9509 }
9510
9511 if (TREE_CODE (t) == BIND_EXPR)
9512 {
9513 tree block = BIND_EXPR_BLOCK (t);
9514 for (tree var = BLOCK_VARS (block); var; var = DECL_CHAIN (var))
9515 if (!data->local_decls.contains (var))
9516 data->local_decls.add (var);
9517 return NULL_TREE;
9518 }
9519
9520 if (TREE_TYPE (t) && LAMBDA_TYPE_P (TREE_TYPE (t)))
9521 {
9522 tree lt = TREE_TYPE (t);
9523 gcc_assert (CLASS_TYPE_P (lt));
9524
9525 if (!data->lambda_objects_accessed.contains (t)
9526 /* Do not prepare to create target maps for locally declared
9527 lambdas or anonymous ones. */
9528 && !data->local_decls.contains (t)
9529 && TREE_CODE (t) != TARGET_EXPR)
9530 data->lambda_objects_accessed.add (t);
9531 *walk_subtrees = 0;
9532 return NULL_TREE;
9533 }
9534
9535 return NULL_TREE;
9536 }
9537
9538 /* Helper function for finish_omp_target, and also from tsubst_expr.
9539 Create additional clauses for mapping of non-static members, lambda objects,
9540 etc. */
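/* An illustrative sketch (hypothetical user code, not from GCC) of the kind
   of member function this caters for:

     struct S {
       int *ptr;
       void f (int n) {
       #pragma omp target
	 for (int i = 0; i < n; i++)
	   ptr[i] = i;
       }
     };

   The implicit uses of 'this' and of the pointer member lead to additional
   clauses along the lines of map(tofrom: *this) plus a firstprivate pointer
   for 'this', and map(alloc: this->ptr[:0]) with an attach for the pointer
   member, being created below.  */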
9541
9542 void
9543 finish_omp_target_clauses (location_t loc, tree body, tree *clauses_ptr)
9544 {
9545 omp_target_walk_data data;
9546 data.this_expr_accessed = false;
9547 data.current_object = NULL_TREE;
9548
9549 if (DECL_NONSTATIC_MEMBER_P (current_function_decl) && current_class_ptr)
9550 if (tree ct = current_nonlambda_class_type ())
9551 {
9552 tree object = maybe_dummy_object (ct, NULL);
9553 object = maybe_resolve_dummy (object, true);
9554 data.current_object = object;
9555 }
9556
9557 if (DECL_LAMBDA_FUNCTION_P (current_function_decl))
9558 {
9559 tree closure = DECL_ARGUMENTS (current_function_decl);
9560 data.current_closure = build_indirect_ref (loc, closure, RO_UNARY_STAR);
9561 }
9562 else
9563 data.current_closure = NULL_TREE;
9564
9565 cp_walk_tree_without_duplicates (&body, finish_omp_target_clauses_r, &data);
9566
9567 auto_vec<tree, 16> new_clauses;
9568
9569 tree omp_target_this_expr = NULL_TREE;
9570 tree *explicit_this_deref_map = NULL;
9571 if (data.this_expr_accessed)
9572 {
9573 omp_target_this_expr = TREE_OPERAND (data.current_object, 0);
9574
9575 /* See if explicit user-specified map(this[:]) clause already exists.
9576 If not, we create an implicit map(tofrom:this[:1]) clause. */
9577 for (tree *cp = clauses_ptr; *cp; cp = &OMP_CLAUSE_CHAIN (*cp))
9578 if (OMP_CLAUSE_CODE (*cp) == OMP_CLAUSE_MAP
9579 && (TREE_CODE (OMP_CLAUSE_DECL (*cp)) == INDIRECT_REF
9580 || TREE_CODE (OMP_CLAUSE_DECL (*cp)) == MEM_REF)
9581 && operand_equal_p (TREE_OPERAND (OMP_CLAUSE_DECL (*cp), 0),
9582 omp_target_this_expr))
9583 {
9584 explicit_this_deref_map = cp;
9585 break;
9586 }
9587 }
9588
9589 if (DECL_LAMBDA_FUNCTION_P (current_function_decl)
9590 && (data.this_expr_accessed
9591 || !data.closure_vars_accessed.is_empty ()))
9592 {
9593 /* For lambda functions, we need to first create a copy of the
9594 __closure object. */
9595 tree closure = DECL_ARGUMENTS (current_function_decl);
9596 tree c = build_omp_clause (loc, OMP_CLAUSE_MAP);
9597 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
9598 OMP_CLAUSE_DECL (c)
9599 = build_indirect_ref (loc, closure, RO_UNARY_STAR);
9600 OMP_CLAUSE_SIZE (c)
9601 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (closure)));
9602 new_clauses.safe_push (c);
9603
9604 tree closure_obj = OMP_CLAUSE_DECL (c);
9605 tree closure_type = TREE_TYPE (closure_obj);
9606
9607 gcc_assert (LAMBDA_TYPE_P (closure_type)
9608 && CLASS_TYPE_P (closure_type));
9609
9610 tree c2 = build_omp_clause (loc, OMP_CLAUSE_MAP);
9611 OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_FIRSTPRIVATE_POINTER);
9612 OMP_CLAUSE_DECL (c2) = closure;
9613 OMP_CLAUSE_SIZE (c2) = size_zero_node;
9614 new_clauses.safe_push (c2);
9615 }
9616
9617 if (data.this_expr_accessed)
9618 {
9619 /* If the this-expr was accessed, create a map(*this) clause. */
9620 enum gomp_map_kind kind = GOMP_MAP_TOFROM;
9621 if (explicit_this_deref_map)
9622 {
9623 tree this_map = *explicit_this_deref_map;
9624 tree nc = OMP_CLAUSE_CHAIN (this_map);
9625 gcc_assert (nc != NULL_TREE
9626 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
9627 && (OMP_CLAUSE_MAP_KIND (nc)
9628 == GOMP_MAP_FIRSTPRIVATE_POINTER));
9629 kind = OMP_CLAUSE_MAP_KIND (this_map);
9630 /* Remove the original 'map(*this) map(firstprivate_ptr:this)'
9631 two-map sequence from the chain. */
9632 *explicit_this_deref_map = OMP_CLAUSE_CHAIN (nc);
9633 }
9634 tree c = build_omp_clause (loc, OMP_CLAUSE_MAP);
9635 OMP_CLAUSE_SET_MAP_KIND (c, kind);
9636 OMP_CLAUSE_DECL (c)
9637 = build_indirect_ref (loc, omp_target_this_expr, RO_UNARY_STAR);
9638 OMP_CLAUSE_SIZE (c)
9639 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (omp_target_this_expr)));
9640 new_clauses.safe_push (c);
9641
9642 /* If we're in a lambda function, the this-pointer will actually be
9643 '__closure->this', a mapped member of __closure, hence always_pointer.
9644 Otherwise it's a firstprivate pointer. */
9645 enum gomp_map_kind ptr_kind
9646 = (DECL_LAMBDA_FUNCTION_P (current_function_decl)
9647 ? GOMP_MAP_ALWAYS_POINTER
9648 : GOMP_MAP_FIRSTPRIVATE_POINTER);
9649 c = build_omp_clause (loc, OMP_CLAUSE_MAP);
9650 OMP_CLAUSE_SET_MAP_KIND (c, ptr_kind);
9651 OMP_CLAUSE_DECL (c) = omp_target_this_expr;
9652 OMP_CLAUSE_SIZE (c) = size_zero_node;
9653 new_clauses.safe_push (c);
9654 }
9655
9656 if (DECL_LAMBDA_FUNCTION_P (current_function_decl))
9657 {
9658 if (omp_target_this_expr)
9659 {
9660 STRIP_NOPS (omp_target_this_expr);
9661 gcc_assert (DECL_HAS_VALUE_EXPR_P (omp_target_this_expr));
9662 omp_target_this_expr = DECL_VALUE_EXPR (omp_target_this_expr);
9663 }
9664
9665 for (hash_set<tree>::iterator i = data.closure_vars_accessed.begin ();
9666 i != data.closure_vars_accessed.end (); ++i)
9667 {
9668 tree orig_decl = *i;
9669 tree closure_expr = DECL_VALUE_EXPR (orig_decl);
9670
9671 if (TREE_CODE (TREE_TYPE (orig_decl)) == POINTER_TYPE
9672 || TREE_CODE (TREE_TYPE (orig_decl)) == REFERENCE_TYPE)
9673 {
9674 /* this-pointer is processed above, outside this loop. */
9675 if (omp_target_this_expr
9676 && operand_equal_p (closure_expr, omp_target_this_expr))
9677 continue;
9678
9679 bool ptr_p = TREE_CODE (TREE_TYPE (orig_decl)) == POINTER_TYPE;
9680 enum gomp_map_kind kind, ptr_kind, nc_kind;
9681 tree size;
9682
9683 if (ptr_p)
9684 {
9685 /* For pointers, default mapped as zero-length array
9686 section. */
9687 kind = GOMP_MAP_ALLOC;
9688 nc_kind = GOMP_MAP_FIRSTPRIVATE_POINTER;
9689 ptr_kind = GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION;
9690 size = size_zero_node;
9691 }
9692 else
9693 {
9694 /* For references, default mapped as appearing on map
9695 clause. */
9696 kind = GOMP_MAP_TOFROM;
9697 nc_kind = GOMP_MAP_FIRSTPRIVATE_REFERENCE;
9698 ptr_kind = GOMP_MAP_ALWAYS_POINTER;
9699 size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (closure_expr)));
9700 }
9701
9702 for (tree *p = clauses_ptr; *p; p = &OMP_CLAUSE_CHAIN (*p))
9703 if (OMP_CLAUSE_CODE (*p) == OMP_CLAUSE_MAP
9704 && (TREE_CODE (OMP_CLAUSE_DECL (*p)) == INDIRECT_REF
9705 || TREE_CODE (OMP_CLAUSE_DECL (*p)) == MEM_REF)
9706 && operand_equal_p (TREE_OPERAND (OMP_CLAUSE_DECL (*p), 0),
9707 orig_decl))
9708 {
9709 /* If this was already specified by user as a map,
9710 save the user specified map kind, delete the
9711 "map(*ptr/ref), map(firstprivate ptr/ref)" sequence,
9712 and insert our own sequence:
9713 "map(*__closure->ptr/ref), map(<ptr_kind>:__closure->ref)"
9714 */
9715 tree nc = OMP_CLAUSE_CHAIN (*p);
9716 gcc_assert (nc != NULL_TREE
9717 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
9718 && OMP_CLAUSE_MAP_KIND (nc) == nc_kind);
9719 /* Update with user specified kind and size. */
9720 kind = OMP_CLAUSE_MAP_KIND (*p);
9721 size = OMP_CLAUSE_SIZE (*p);
9722 *p = OMP_CLAUSE_CHAIN (nc);
9723 break;
9724 }
9725
9726 tree c = build_omp_clause (loc, OMP_CLAUSE_MAP);
9727 OMP_CLAUSE_SET_MAP_KIND (c, kind);
9728 OMP_CLAUSE_DECL (c)
9729 = build_indirect_ref (loc, closure_expr, RO_UNARY_STAR);
9730 OMP_CLAUSE_SIZE (c) = size;
9731 if (ptr_p)
9732 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c) = 1;
9733 new_clauses.safe_push (c);
9734
9735 c = build_omp_clause (loc, OMP_CLAUSE_MAP);
9736 OMP_CLAUSE_SET_MAP_KIND (c, ptr_kind);
9737 OMP_CLAUSE_DECL (c) = closure_expr;
9738 OMP_CLAUSE_SIZE (c) = size_zero_node;
9739 new_clauses.safe_push (c);
9740 }
9741 }
9742 }
9743
9744 if (!data.ptr_members_accessed.is_empty ())
9745 for (hash_map<tree, tree>::iterator i = data.ptr_members_accessed.begin ();
9746 i != data.ptr_members_accessed.end (); ++i)
9747 {
9748 /* For each referenced member that is of pointer or reference-to-pointer
9749 type, create the equivalent of map(alloc:this->ptr[:0]). */
9750 tree field_decl = (*i).first;
9751 tree ptr_member = (*i).second;
9752
9753 for (tree c = *clauses_ptr; c; c = OMP_CLAUSE_CHAIN (c))
9754 {
9755 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
9756 continue;
9757 /* If map(this->ptr[:N]) already exists, avoid creating another
9758 such map. */
9759 tree decl = OMP_CLAUSE_DECL (c);
9760 if ((TREE_CODE (decl) == INDIRECT_REF
9761 || TREE_CODE (decl) == MEM_REF)
9762 && operand_equal_p (TREE_OPERAND (decl, 0), ptr_member))
9763 goto next_ptr_member;
9764 }
9765
9766 if (!cxx_mark_addressable (ptr_member))
9767 gcc_unreachable ();
9768
9769 if (TREE_CODE (TREE_TYPE (field_decl)) == REFERENCE_TYPE)
9770 {
9771 /* For references to pointers, we need to map the referenced
9772 pointer first for things to be correct. */
9773 tree ptr_member_type = TREE_TYPE (ptr_member);
9774
9775 /* Map pointer target as zero-length array section. */
9776 tree c = build_omp_clause (loc, OMP_CLAUSE_MAP);
9777 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ALLOC);
9778 OMP_CLAUSE_DECL (c)
9779 = build1 (INDIRECT_REF, TREE_TYPE (ptr_member_type), ptr_member);
9780 OMP_CLAUSE_SIZE (c) = size_zero_node;
9781 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c) = 1;
9782
9783 /* Map pointer to zero-length array section. */
9784 tree c2 = build_omp_clause (loc, OMP_CLAUSE_MAP);
9785 OMP_CLAUSE_SET_MAP_KIND
9786 (c2, GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION);
9787 OMP_CLAUSE_DECL (c2) = ptr_member;
9788 OMP_CLAUSE_SIZE (c2) = size_zero_node;
9789
9790 /* Attach reference-to-pointer field to pointer. */
9791 tree c3 = build_omp_clause (loc, OMP_CLAUSE_MAP);
9792 OMP_CLAUSE_SET_MAP_KIND (c3, GOMP_MAP_ATTACH);
9793 OMP_CLAUSE_DECL (c3) = TREE_OPERAND (ptr_member, 0);
9794 OMP_CLAUSE_SIZE (c3) = size_zero_node;
9795
9796 new_clauses.safe_push (c);
9797 new_clauses.safe_push (c2);
9798 new_clauses.safe_push (c3);
9799 }
9800 else if (TREE_CODE (TREE_TYPE (field_decl)) == POINTER_TYPE)
9801 {
9802 /* Map pointer target as zero-length array section. */
9803 tree c = build_omp_clause (loc, OMP_CLAUSE_MAP);
9804 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ALLOC);
9805 OMP_CLAUSE_DECL (c) = build_indirect_ref (loc, ptr_member,
9806 RO_UNARY_STAR);
9807 OMP_CLAUSE_SIZE (c) = size_zero_node;
9808 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c) = 1;
9809
9810 /* Attach zero-length array section to pointer. */
9811 tree c2 = build_omp_clause (loc, OMP_CLAUSE_MAP);
9812 OMP_CLAUSE_SET_MAP_KIND
9813 (c2, GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION);
9814 OMP_CLAUSE_DECL (c2) = ptr_member;
9815 OMP_CLAUSE_SIZE (c2) = size_zero_node;
9816
9817 new_clauses.safe_push (c);
9818 new_clauses.safe_push (c2);
9819 }
9820 else
9821 gcc_unreachable ();
9822
9823 next_ptr_member:
9824 ;
9825 }
9826
9827 for (hash_set<tree>::iterator i = data.lambda_objects_accessed.begin ();
9828 i != data.lambda_objects_accessed.end (); ++i)
9829 {
9830 tree lobj = *i;
9831 if (TREE_CODE (lobj) == TARGET_EXPR)
9832 lobj = TREE_OPERAND (lobj, 0);
9833
9834 tree lt = TREE_TYPE (lobj);
9835 gcc_assert (LAMBDA_TYPE_P (lt) && CLASS_TYPE_P (lt));
9836
9837 tree lc = build_omp_clause (loc, OMP_CLAUSE_MAP);
9838 OMP_CLAUSE_SET_MAP_KIND (lc, GOMP_MAP_TO);
9839 OMP_CLAUSE_DECL (lc) = lobj;
9840 OMP_CLAUSE_SIZE (lc) = TYPE_SIZE_UNIT (lt);
9841 new_clauses.safe_push (lc);
9842
9843 for (tree fld = TYPE_FIELDS (lt); fld; fld = DECL_CHAIN (fld))
9844 {
9845 if (TREE_CODE (TREE_TYPE (fld)) == POINTER_TYPE)
9846 {
9847 tree exp = build3 (COMPONENT_REF, TREE_TYPE (fld),
9848 lobj, fld, NULL_TREE);
9849 tree c = build_omp_clause (loc, OMP_CLAUSE_MAP);
9850 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ALLOC);
9851 OMP_CLAUSE_DECL (c)
9852 = build_indirect_ref (loc, exp, RO_UNARY_STAR);
9853 OMP_CLAUSE_SIZE (c) = size_zero_node;
9854 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c) = 1;
9855 new_clauses.safe_push (c);
9856
9857 c = build_omp_clause (loc, OMP_CLAUSE_MAP);
9858 OMP_CLAUSE_SET_MAP_KIND
9859 (c, GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION);
9860 OMP_CLAUSE_DECL (c) = exp;
9861 OMP_CLAUSE_SIZE (c) = size_zero_node;
9862 new_clauses.safe_push (c);
9863 }
9864 else if (TREE_CODE (TREE_TYPE (fld)) == REFERENCE_TYPE)
9865 {
9866 tree exp = build3 (COMPONENT_REF, TREE_TYPE (fld),
9867 lobj, fld, NULL_TREE);
9868 tree c = build_omp_clause (loc, OMP_CLAUSE_MAP);
9869 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
9870 OMP_CLAUSE_DECL (c)
9871 = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (exp)), exp);
9872 OMP_CLAUSE_SIZE (c)
9873 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (exp)));
9874 new_clauses.safe_push (c);
9875
9876 c = build_omp_clause (loc, OMP_CLAUSE_MAP);
9877 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ALWAYS_POINTER);
9878 OMP_CLAUSE_DECL (c) = exp;
9879 OMP_CLAUSE_SIZE (c) = size_zero_node;
9880 new_clauses.safe_push (c);
9881 }
9882 }
9883 }
9884
9885 tree c = *clauses_ptr;
9886 for (int i = new_clauses.length () - 1; i >= 0; i--)
9887 {
9888 OMP_CLAUSE_CHAIN (new_clauses[i]) = c;
9889 c = new_clauses[i];
9890 }
9891 *clauses_ptr = c;
9892 }
9893
9894 /* Called from cp_parser_omp_target. Create additional implicit clauses for
9895 OpenMP target directives, and do sanity checks. */
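/* For instance (illustrative only; 'a' is hypothetical), a map-type that is
   only meaningful on 'target exit data', e.g.

     #pragma omp target map (release: a)

   is expected to be diagnosed by the check below, while to/from/tofrom/alloc
   and the internal pointer/attach kinds are accepted.  */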
9896
9897 tree
9898 finish_omp_target (location_t loc, tree clauses, tree body, bool combined_p)
9899 {
9900 if (!processing_template_decl)
9901 finish_omp_target_clauses (loc, body, &clauses);
9902
9903 tree stmt = make_node (OMP_TARGET);
9904 TREE_TYPE (stmt) = void_type_node;
9905 OMP_TARGET_CLAUSES (stmt) = clauses;
9906 OMP_TARGET_BODY (stmt) = body;
9907 OMP_TARGET_COMBINED (stmt) = combined_p;
9908 SET_EXPR_LOCATION (stmt, loc);
9909
9910 tree c = clauses;
9911 while (c)
9912 {
9913 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
9914 switch (OMP_CLAUSE_MAP_KIND (c))
9915 {
9916 case GOMP_MAP_TO:
9917 case GOMP_MAP_ALWAYS_TO:
9918 case GOMP_MAP_FROM:
9919 case GOMP_MAP_ALWAYS_FROM:
9920 case GOMP_MAP_TOFROM:
9921 case GOMP_MAP_ALWAYS_TOFROM:
9922 case GOMP_MAP_ALLOC:
9923 case GOMP_MAP_FIRSTPRIVATE_POINTER:
9924 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
9925 case GOMP_MAP_ALWAYS_POINTER:
9926 case GOMP_MAP_ATTACH_DETACH:
9927 case GOMP_MAP_ATTACH:
9928 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
9929 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
9930 break;
9931 default:
9932 error_at (OMP_CLAUSE_LOCATION (c),
9933 "%<#pragma omp target%> with map-type other "
9934 "than %<to%>, %<from%>, %<tofrom%> or %<alloc%> "
9935 "on %<map%> clause");
9936 break;
9937 }
9938 c = OMP_CLAUSE_CHAIN (c);
9939 }
9940 return add_stmt (stmt);
9941 }
9942
9943 tree
9944 finish_omp_parallel (tree clauses, tree body)
9945 {
9946 tree stmt;
9947
9948 body = finish_omp_structured_block (body);
9949
9950 stmt = make_node (OMP_PARALLEL);
9951 TREE_TYPE (stmt) = void_type_node;
9952 OMP_PARALLEL_CLAUSES (stmt) = clauses;
9953 OMP_PARALLEL_BODY (stmt) = body;
9954
9955 return add_stmt (stmt);
9956 }
9957
9958 tree
9959 begin_omp_task (void)
9960 {
9961 keep_next_level (true);
9962 return begin_omp_structured_block ();
9963 }
9964
9965 tree
9966 finish_omp_task (tree clauses, tree body)
9967 {
9968 tree stmt;
9969
9970 body = finish_omp_structured_block (body);
9971
9972 stmt = make_node (OMP_TASK);
9973 TREE_TYPE (stmt) = void_type_node;
9974 OMP_TASK_CLAUSES (stmt) = clauses;
9975 OMP_TASK_BODY (stmt) = body;
9976
9977 return add_stmt (stmt);
9978 }
9979
9980 /* Helper function for finish_omp_for. Convert Ith random access iterator
9981 into integral iterator. Return FALSE if successful. */
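/* An illustrative example (hypothetical user code) of a loop this rewrites;
   the random access iterator 'it' is replaced by an integral counter over
   the iterator difference:

     #include <vector>
     void f (std::vector<int> &v)
     {
     #pragma omp parallel for
       for (std::vector<int>::iterator it = v.begin (); it != v.end (); ++it)
	 *it = 0;
     }  */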
9982
9983 static bool
9984 handle_omp_for_class_iterator (int i, location_t locus, enum tree_code code,
9985 tree declv, tree orig_declv, tree initv,
9986 tree condv, tree incrv, tree *body,
9987 tree *pre_body, tree &clauses,
9988 int collapse, int ordered)
9989 {
9990 tree diff, iter_init, iter_incr = NULL, last;
9991 tree incr_var = NULL, orig_pre_body, orig_body, c;
9992 tree decl = TREE_VEC_ELT (declv, i);
9993 tree init = TREE_VEC_ELT (initv, i);
9994 tree cond = TREE_VEC_ELT (condv, i);
9995 tree incr = TREE_VEC_ELT (incrv, i);
9996 tree iter = decl;
9997 location_t elocus = locus;
9998
9999 if (init && EXPR_HAS_LOCATION (init))
10000 elocus = EXPR_LOCATION (init);
10001
10002 switch (TREE_CODE (cond))
10003 {
10004 case GT_EXPR:
10005 case GE_EXPR:
10006 case LT_EXPR:
10007 case LE_EXPR:
10008 case NE_EXPR:
10009 if (TREE_OPERAND (cond, 1) == iter)
10010 cond = build2 (swap_tree_comparison (TREE_CODE (cond)),
10011 TREE_TYPE (cond), iter, TREE_OPERAND (cond, 0));
10012 if (TREE_OPERAND (cond, 0) != iter)
10013 cond = error_mark_node;
10014 else
10015 {
10016 tree tem = build_x_binary_op (EXPR_LOCATION (cond),
10017 TREE_CODE (cond),
10018 iter, ERROR_MARK,
10019 TREE_OPERAND (cond, 1), ERROR_MARK,
10020 NULL_TREE, NULL, tf_warning_or_error);
10021 if (error_operand_p (tem))
10022 return true;
10023 }
10024 break;
10025 default:
10026 cond = error_mark_node;
10027 break;
10028 }
10029 if (cond == error_mark_node)
10030 {
10031 error_at (elocus, "invalid controlling predicate");
10032 return true;
10033 }
10034 diff = build_x_binary_op (elocus, MINUS_EXPR,
10035 TREE_OPERAND (cond, 1), ERROR_MARK,
10036 iter, ERROR_MARK,
10037 NULL_TREE, NULL, tf_warning_or_error);
10038 diff = cp_fully_fold (diff);
10039 if (error_operand_p (diff))
10040 return true;
10041 if (TREE_CODE (TREE_TYPE (diff)) != INTEGER_TYPE)
10042 {
10043 error_at (elocus, "difference between %qE and %qD does not have integer type",
10044 TREE_OPERAND (cond, 1), iter);
10045 return true;
10046 }
10047 if (!c_omp_check_loop_iv_exprs (locus, code, orig_declv, i,
10048 TREE_VEC_ELT (declv, i), NULL_TREE,
10049 cond, cp_walk_subtrees))
10050 return true;
10051
10052 switch (TREE_CODE (incr))
10053 {
10054 case PREINCREMENT_EXPR:
10055 case PREDECREMENT_EXPR:
10056 case POSTINCREMENT_EXPR:
10057 case POSTDECREMENT_EXPR:
10058 if (TREE_OPERAND (incr, 0) != iter)
10059 {
10060 incr = error_mark_node;
10061 break;
10062 }
10063 iter_incr = build_x_unary_op (EXPR_LOCATION (incr),
10064 TREE_CODE (incr), iter,
10065 NULL_TREE, tf_warning_or_error);
10066 if (error_operand_p (iter_incr))
10067 return true;
10068 else if (TREE_CODE (incr) == PREINCREMENT_EXPR
10069 || TREE_CODE (incr) == POSTINCREMENT_EXPR)
10070 incr = integer_one_node;
10071 else
10072 incr = integer_minus_one_node;
10073 break;
10074 case MODIFY_EXPR:
10075 if (TREE_OPERAND (incr, 0) != iter)
10076 incr = error_mark_node;
10077 else if (TREE_CODE (TREE_OPERAND (incr, 1)) == PLUS_EXPR
10078 || TREE_CODE (TREE_OPERAND (incr, 1)) == MINUS_EXPR)
10079 {
10080 tree rhs = TREE_OPERAND (incr, 1);
10081 if (TREE_OPERAND (rhs, 0) == iter)
10082 {
10083 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs, 1)))
10084 != INTEGER_TYPE)
10085 incr = error_mark_node;
10086 else
10087 {
10088 iter_incr = build_x_modify_expr (EXPR_LOCATION (rhs),
10089 iter, TREE_CODE (rhs),
10090 TREE_OPERAND (rhs, 1),
10091 NULL_TREE,
10092 tf_warning_or_error);
10093 if (error_operand_p (iter_incr))
10094 return true;
10095 incr = TREE_OPERAND (rhs, 1);
10096 incr = cp_convert (TREE_TYPE (diff), incr,
10097 tf_warning_or_error);
10098 if (TREE_CODE (rhs) == MINUS_EXPR)
10099 {
10100 incr = build1 (NEGATE_EXPR, TREE_TYPE (diff), incr);
10101 incr = fold_simple (incr);
10102 }
10103 if (TREE_CODE (incr) != INTEGER_CST
10104 && (TREE_CODE (incr) != NOP_EXPR
10105 || (TREE_CODE (TREE_OPERAND (incr, 0))
10106 != INTEGER_CST)))
10107 iter_incr = NULL;
10108 }
10109 }
10110 else if (TREE_OPERAND (rhs, 1) == iter)
10111 {
10112 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (rhs, 0))) != INTEGER_TYPE
10113 || TREE_CODE (rhs) != PLUS_EXPR)
10114 incr = error_mark_node;
10115 else
10116 {
10117 iter_incr = build_x_binary_op (EXPR_LOCATION (rhs),
10118 PLUS_EXPR,
10119 TREE_OPERAND (rhs, 0),
10120 ERROR_MARK, iter,
10121 ERROR_MARK, NULL_TREE, NULL,
10122 tf_warning_or_error);
10123 if (error_operand_p (iter_incr))
10124 return true;
10125 iter_incr = build_x_modify_expr (EXPR_LOCATION (rhs),
10126 iter, NOP_EXPR,
10127 iter_incr, NULL_TREE,
10128 tf_warning_or_error);
10129 if (error_operand_p (iter_incr))
10130 return true;
10131 incr = TREE_OPERAND (rhs, 0);
10132 iter_incr = NULL;
10133 }
10134 }
10135 else
10136 incr = error_mark_node;
10137 }
10138 else
10139 incr = error_mark_node;
10140 break;
10141 default:
10142 incr = error_mark_node;
10143 break;
10144 }
10145
10146 if (incr == error_mark_node)
10147 {
10148 error_at (elocus, "invalid increment expression");
10149 return true;
10150 }
10151
10152 incr = cp_convert (TREE_TYPE (diff), incr, tf_warning_or_error);
10153 incr = cp_fully_fold (incr);
10154 tree loop_iv_seen = NULL_TREE;
10155 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
10156 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
10157 && OMP_CLAUSE_DECL (c) == iter)
10158 {
10159 if (code == OMP_TASKLOOP || code == OMP_LOOP)
10160 {
10161 loop_iv_seen = c;
10162 OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c) = 1;
10163 }
10164 break;
10165 }
10166 else if ((code == OMP_TASKLOOP || code == OMP_LOOP)
10167 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
10168 && OMP_CLAUSE_DECL (c) == iter)
10169 {
10170 loop_iv_seen = c;
10171 if (code == OMP_TASKLOOP)
10172 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c) = 1;
10173 }
10174
10175 decl = create_temporary_var (TREE_TYPE (diff));
10176 pushdecl (decl);
10177 add_decl_expr (decl);
10178 last = create_temporary_var (TREE_TYPE (diff));
10179 pushdecl (last);
10180 add_decl_expr (last);
10181 if (c && iter_incr == NULL && TREE_CODE (incr) != INTEGER_CST
10182 && (!ordered || (i < collapse && collapse > 1)))
10183 {
10184 incr_var = create_temporary_var (TREE_TYPE (diff));
10185 pushdecl (incr_var);
10186 add_decl_expr (incr_var);
10187 }
10188 gcc_assert (stmts_are_full_exprs_p ());
10189 tree diffvar = NULL_TREE;
10190 if (code == OMP_TASKLOOP)
10191 {
10192 if (!loop_iv_seen)
10193 {
10194 tree ivc = build_omp_clause (locus, OMP_CLAUSE_FIRSTPRIVATE);
10195 OMP_CLAUSE_DECL (ivc) = iter;
10196 cxx_omp_finish_clause (ivc, NULL, false);
10197 OMP_CLAUSE_CHAIN (ivc) = clauses;
10198 clauses = ivc;
10199 }
10200 tree lvc = build_omp_clause (locus, OMP_CLAUSE_FIRSTPRIVATE);
10201 OMP_CLAUSE_DECL (lvc) = last;
10202 OMP_CLAUSE_CHAIN (lvc) = clauses;
10203 clauses = lvc;
10204 diffvar = create_temporary_var (TREE_TYPE (diff));
10205 pushdecl (diffvar);
10206 add_decl_expr (diffvar);
10207 }
10208 else if (code == OMP_LOOP)
10209 {
10210 if (!loop_iv_seen)
10211 {
10212 /* While iterators on the loop construct are predetermined
10213 lastprivate, if the decl is not declared inside the
10214 loop, OMP_CLAUSE_LASTPRIVATE should have been added
10215 already. */
10216 loop_iv_seen = build_omp_clause (locus, OMP_CLAUSE_FIRSTPRIVATE);
10217 OMP_CLAUSE_DECL (loop_iv_seen) = iter;
10218 OMP_CLAUSE_CHAIN (loop_iv_seen) = clauses;
10219 clauses = loop_iv_seen;
10220 }
10221 else if (OMP_CLAUSE_CODE (loop_iv_seen) == OMP_CLAUSE_PRIVATE)
10222 {
10223 OMP_CLAUSE_PRIVATE_DEBUG (loop_iv_seen) = 0;
10224 OMP_CLAUSE_PRIVATE_OUTER_REF (loop_iv_seen) = 0;
10225 OMP_CLAUSE_CODE (loop_iv_seen) = OMP_CLAUSE_FIRSTPRIVATE;
10226 }
10227 if (OMP_CLAUSE_CODE (loop_iv_seen) == OMP_CLAUSE_FIRSTPRIVATE)
10228 cxx_omp_finish_clause (loop_iv_seen, NULL, false);
10229 }
10230
10231 orig_pre_body = *pre_body;
10232 *pre_body = push_stmt_list ();
10233 if (orig_pre_body)
10234 add_stmt (orig_pre_body);
10235 if (init != NULL)
10236 finish_expr_stmt (build_x_modify_expr (elocus,
10237 iter, NOP_EXPR, init,
10238 NULL_TREE, tf_warning_or_error));
10239 init = build_int_cst (TREE_TYPE (diff), 0);
10240 if (c && iter_incr == NULL
10241 && (!ordered || (i < collapse && collapse > 1)))
10242 {
10243 if (incr_var)
10244 {
10245 finish_expr_stmt (build_x_modify_expr (elocus,
10246 incr_var, NOP_EXPR,
10247 incr, NULL_TREE,
10248 tf_warning_or_error));
10249 incr = incr_var;
10250 }
10251 iter_incr = build_x_modify_expr (elocus,
10252 iter, PLUS_EXPR, incr,
10253 NULL_TREE, tf_warning_or_error);
10254 }
10255 if (c && ordered && i < collapse && collapse > 1)
10256 iter_incr = incr;
10257 finish_expr_stmt (build_x_modify_expr (elocus,
10258 last, NOP_EXPR, init,
10259 NULL_TREE, tf_warning_or_error));
10260 if (diffvar)
10261 {
10262 finish_expr_stmt (build_x_modify_expr (elocus,
10263 diffvar, NOP_EXPR,
10264 diff, NULL_TREE, tf_warning_or_error));
10265 diff = diffvar;
10266 }
10267 *pre_body = pop_stmt_list (*pre_body);
10268
10269 cond = cp_build_binary_op (elocus,
10270 TREE_CODE (cond), decl, diff,
10271 tf_warning_or_error);
10272 incr = build_modify_expr (elocus, decl, NULL_TREE, PLUS_EXPR,
10273 elocus, incr, NULL_TREE);
10274
10275 orig_body = *body;
10276 *body = push_stmt_list ();
10277 iter_init = build2 (MINUS_EXPR, TREE_TYPE (diff), decl, last);
10278 iter_init = build_x_modify_expr (elocus,
10279 iter, PLUS_EXPR, iter_init,
10280 NULL_TREE, tf_warning_or_error);
10281 if (iter_init != error_mark_node)
10282 iter_init = build1 (NOP_EXPR, void_type_node, iter_init);
10283 finish_expr_stmt (iter_init);
10284 finish_expr_stmt (build_x_modify_expr (elocus,
10285 last, NOP_EXPR, decl,
10286 NULL_TREE, tf_warning_or_error));
10287 add_stmt (orig_body);
10288 *body = pop_stmt_list (*body);
10289
10290 if (c)
10291 {
10292 OMP_CLAUSE_LASTPRIVATE_STMT (c) = push_stmt_list ();
10293 if (!ordered)
10294 finish_expr_stmt (iter_incr);
10295 else
10296 {
10297 iter_init = decl;
10298 if (i < collapse && collapse > 1 && !error_operand_p (iter_incr))
10299 iter_init = build2 (PLUS_EXPR, TREE_TYPE (diff),
10300 iter_init, iter_incr);
10301 iter_init = build2 (MINUS_EXPR, TREE_TYPE (diff), iter_init, last);
10302 iter_init = build_x_modify_expr (elocus,
10303 iter, PLUS_EXPR, iter_init,
10304 NULL_TREE, tf_warning_or_error);
10305 if (iter_init != error_mark_node)
10306 iter_init = build1 (NOP_EXPR, void_type_node, iter_init);
10307 finish_expr_stmt (iter_init);
10308 }
10309 OMP_CLAUSE_LASTPRIVATE_STMT (c)
10310 = pop_stmt_list (OMP_CLAUSE_LASTPRIVATE_STMT (c));
10311 }
10312
10313 if (TREE_CODE (TREE_VEC_ELT (orig_declv, i)) == TREE_LIST)
10314 {
10315 tree t = TREE_VEC_ELT (orig_declv, i);
10316 gcc_assert (TREE_PURPOSE (t) == NULL_TREE
10317 && TREE_VALUE (t) == NULL_TREE
10318 && TREE_CODE (TREE_CHAIN (t)) == TREE_VEC);
10319 TREE_PURPOSE (t) = TREE_VEC_ELT (declv, i);
10320 TREE_VALUE (t) = last;
10321 }
10322 else
10323 TREE_VEC_ELT (orig_declv, i)
10324 = tree_cons (TREE_VEC_ELT (declv, i), last, NULL_TREE);
10325 TREE_VEC_ELT (declv, i) = decl;
10326 TREE_VEC_ELT (initv, i) = init;
10327 TREE_VEC_ELT (condv, i) = cond;
10328 TREE_VEC_ELT (incrv, i) = incr;
10329
10330 return false;
10331 }
10332
10333 /* Build and validate an OMP_FOR statement. CLAUSES, BODY, COND, INCR
10334 are directly for their associated operands in the statement. DECL
10335 and INIT are a combo; if DECL is NULL then INIT ought to be a
10336 MODIFY_EXPR, and the DECL should be extracted. PRE_BODY are
10337 optional statements that need to go before the loop into its
10338 sk_omp scope. */
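/* A minimal example (illustrative only; 'n', 'm' and 'a' are hypothetical)
   of a loop nest this validates; with collapse(2) both loop levels appear
   in DECLV/INITV/CONDV/INCRV:

     #pragma omp for collapse (2)
     for (int i = 0; i < n; i++)
       for (int j = 0; j < m; j++)
	 a[i][j] = 0;  */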
10339
10340 tree
10341 finish_omp_for (location_t locus, enum tree_code code, tree declv,
10342 tree orig_declv, tree initv, tree condv, tree incrv,
10343 tree body, tree pre_body, vec<tree> *orig_inits, tree clauses)
10344 {
10345 tree omp_for = NULL, orig_incr = NULL;
10346 tree decl = NULL, init, cond, incr;
10347 location_t elocus;
10348 int i;
10349 int collapse = 1;
10350 int ordered = 0;
10351
10352 gcc_assert (TREE_VEC_LENGTH (declv) == TREE_VEC_LENGTH (initv));
10353 gcc_assert (TREE_VEC_LENGTH (declv) == TREE_VEC_LENGTH (condv));
10354 gcc_assert (TREE_VEC_LENGTH (declv) == TREE_VEC_LENGTH (incrv));
10355 if (TREE_VEC_LENGTH (declv) > 1)
10356 {
10357 tree c;
10358
10359 c = omp_find_clause (clauses, OMP_CLAUSE_TILE);
10360 if (c)
10361 collapse = list_length (OMP_CLAUSE_TILE_LIST (c));
10362 else
10363 {
10364 c = omp_find_clause (clauses, OMP_CLAUSE_COLLAPSE);
10365 if (c)
10366 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
10367 if (collapse != TREE_VEC_LENGTH (declv))
10368 ordered = TREE_VEC_LENGTH (declv);
10369 }
10370 }
10371 for (i = 0; i < TREE_VEC_LENGTH (declv); i++)
10372 {
10373 decl = TREE_VEC_ELT (declv, i);
10374 init = TREE_VEC_ELT (initv, i);
10375 cond = TREE_VEC_ELT (condv, i);
10376 incr = TREE_VEC_ELT (incrv, i);
10377 elocus = locus;
10378
10379 if (decl == NULL)
10380 {
10381 if (init != NULL)
10382 switch (TREE_CODE (init))
10383 {
10384 case MODIFY_EXPR:
10385 decl = TREE_OPERAND (init, 0);
10386 init = TREE_OPERAND (init, 1);
10387 break;
10388 case MODOP_EXPR:
10389 if (TREE_CODE (TREE_OPERAND (init, 1)) == NOP_EXPR)
10390 {
10391 decl = TREE_OPERAND (init, 0);
10392 init = TREE_OPERAND (init, 2);
10393 }
10394 break;
10395 default:
10396 break;
10397 }
10398
10399 if (decl == NULL)
10400 {
10401 error_at (locus,
10402 "expected iteration declaration or initialization");
10403 return NULL;
10404 }
10405 }
10406
10407 if (init && EXPR_HAS_LOCATION (init))
10408 elocus = EXPR_LOCATION (init);
10409
10410 if (cond == global_namespace)
10411 continue;
10412
10413 if (cond == NULL)
10414 {
10415 error_at (elocus, "missing controlling predicate");
10416 return NULL;
10417 }
10418
10419 if (incr == NULL)
10420 {
10421 error_at (elocus, "missing increment expression");
10422 return NULL;
10423 }
10424
10425 TREE_VEC_ELT (declv, i) = decl;
10426 TREE_VEC_ELT (initv, i) = init;
10427 }
10428
10429 if (orig_inits)
10430 {
10431 bool fail = false;
10432 tree orig_init;
10433 FOR_EACH_VEC_ELT (*orig_inits, i, orig_init)
10434 if (orig_init
10435 && !c_omp_check_loop_iv_exprs (locus, code,
10436 orig_declv ? orig_declv : declv, i,
10437 TREE_VEC_ELT (declv, i), orig_init,
10438 NULL_TREE, cp_walk_subtrees))
10439 fail = true;
10440 if (fail)
10441 return NULL;
10442 }
10443
10444 if (dependent_omp_for_p (declv, initv, condv, incrv))
10445 {
10446 tree stmt;
10447
10448 stmt = make_node (code);
10449
10450 for (i = 0; i < TREE_VEC_LENGTH (declv); i++)
10451 {
10452 /* This is really just a place-holder. We'll be decomposing this
10453 again and going through the cp_build_modify_expr path below when
10454 we instantiate the thing. */
10455 TREE_VEC_ELT (initv, i)
10456 = build2 (MODIFY_EXPR, void_type_node, TREE_VEC_ELT (declv, i),
10457 TREE_VEC_ELT (initv, i));
10458 }
10459
10460 TREE_TYPE (stmt) = void_type_node;
10461 OMP_FOR_INIT (stmt) = initv;
10462 OMP_FOR_COND (stmt) = condv;
10463 OMP_FOR_INCR (stmt) = incrv;
10464 OMP_FOR_BODY (stmt) = body;
10465 OMP_FOR_PRE_BODY (stmt) = pre_body;
10466 OMP_FOR_CLAUSES (stmt) = clauses;
10467
10468 SET_EXPR_LOCATION (stmt, locus);
10469 return add_stmt (stmt);
10470 }
10471
10472 if (!orig_declv)
10473 orig_declv = copy_node (declv);
10474
10475 if (processing_template_decl)
10476 orig_incr = make_tree_vec (TREE_VEC_LENGTH (incrv));
10477
10478 for (i = 0; i < TREE_VEC_LENGTH (declv); )
10479 {
10480 decl = TREE_VEC_ELT (declv, i);
10481 init = TREE_VEC_ELT (initv, i);
10482 cond = TREE_VEC_ELT (condv, i);
10483 incr = TREE_VEC_ELT (incrv, i);
10484 if (orig_incr)
10485 TREE_VEC_ELT (orig_incr, i) = incr;
10486 elocus = locus;
10487
10488 if (init && EXPR_HAS_LOCATION (init))
10489 elocus = EXPR_LOCATION (init);
10490
10491 if (!DECL_P (decl))
10492 {
10493 error_at (elocus, "expected iteration declaration or initialization");
10494 return NULL;
10495 }
10496
10497 if (incr && TREE_CODE (incr) == MODOP_EXPR)
10498 {
10499 if (orig_incr)
10500 TREE_VEC_ELT (orig_incr, i) = incr;
10501 incr = cp_build_modify_expr (elocus, TREE_OPERAND (incr, 0),
10502 TREE_CODE (TREE_OPERAND (incr, 1)),
10503 TREE_OPERAND (incr, 2),
10504 tf_warning_or_error);
10505 }
10506
10507 if (CLASS_TYPE_P (TREE_TYPE (decl)))
10508 {
10509 if (code == OMP_SIMD)
10510 {
10511 error_at (elocus, "%<#pragma omp simd%> used with class "
10512 "iteration variable %qE", decl);
10513 return NULL;
10514 }
10515 if (handle_omp_for_class_iterator (i, locus, code, declv, orig_declv,
10516 initv, condv, incrv, &body,
10517 &pre_body, clauses,
10518 collapse, ordered))
10519 return NULL;
10520 continue;
10521 }
10522
10523 if (!INTEGRAL_TYPE_P (TREE_TYPE (decl))
10524 && !TYPE_PTR_P (TREE_TYPE (decl)))
10525 {
10526 error_at (elocus, "invalid type for iteration variable %qE", decl);
10527 return NULL;
10528 }
10529
10530 if (!processing_template_decl && TREE_CODE (init) != TREE_VEC)
10531 init = cp_build_modify_expr (elocus, decl, NOP_EXPR, init,
10532 tf_warning_or_error);
10533 else
10534 init = build2 (MODIFY_EXPR, void_type_node, decl, init);
10535 if (decl == error_mark_node || init == error_mark_node)
10536 return NULL;
10537
10538 TREE_VEC_ELT (declv, i) = decl;
10539 TREE_VEC_ELT (initv, i) = init;
10540 TREE_VEC_ELT (condv, i) = cond;
10541 TREE_VEC_ELT (incrv, i) = incr;
10542 i++;
10543 }
10544
10545 if (pre_body && IS_EMPTY_STMT (pre_body))
10546 pre_body = NULL;
10547
10548 omp_for = c_finish_omp_for (locus, code, declv, orig_declv, initv, condv,
10549 incrv, body, pre_body,
10550 !processing_template_decl);
10551
10552 /* Check for iterators appearing in lb, b or incr expressions. */
10553 if (omp_for && !c_omp_check_loop_iv (omp_for, orig_declv, cp_walk_subtrees))
10554 omp_for = NULL_TREE;
10555
10556 if (omp_for == NULL)
10557 return NULL;
10558
10559 add_stmt (omp_for);
10560
10561 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INCR (omp_for)); i++)
10562 {
10563 init = TREE_VEC_ELT (OMP_FOR_INIT (omp_for), i);
10564 decl = TREE_OPERAND (init, 0);
10565 cond = TREE_VEC_ELT (OMP_FOR_COND (omp_for), i);
10566 incr = TREE_VEC_ELT (OMP_FOR_INCR (omp_for), i);
10567
10568 if (!processing_template_decl)
10569 {
10570 if (TREE_CODE (TREE_OPERAND (init, 1)) == TREE_VEC)
10571 {
10572 tree t = TREE_VEC_ELT (TREE_OPERAND (init, 1), 1);
10573 TREE_VEC_ELT (TREE_OPERAND (init, 1), 1)
10574 = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
10575 t = TREE_VEC_ELT (TREE_OPERAND (init, 1), 2);
10576 TREE_VEC_ELT (TREE_OPERAND (init, 1), 2)
10577 = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
10578 }
10579 else
10580 {
10581 tree t = TREE_OPERAND (init, 1);
10582 TREE_OPERAND (init, 1)
10583 = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
10584 }
10585 if (TREE_CODE (TREE_OPERAND (cond, 1)) == TREE_VEC)
10586 {
10587 tree t = TREE_VEC_ELT (TREE_OPERAND (cond, 1), 1);
10588 TREE_VEC_ELT (TREE_OPERAND (cond, 1), 1)
10589 = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
10590 t = TREE_VEC_ELT (TREE_OPERAND (cond, 1), 2);
10591 TREE_VEC_ELT (TREE_OPERAND (cond, 1), 2)
10592 = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
10593 }
10594 else
10595 {
10596 tree t = TREE_OPERAND (cond, 1);
10597 TREE_OPERAND (cond, 1)
10598 = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
10599 }
10600 }
10601
10602 if (TREE_CODE (incr) != MODIFY_EXPR)
10603 continue;
10604
10605 if (TREE_SIDE_EFFECTS (TREE_OPERAND (incr, 1))
10606 && BINARY_CLASS_P (TREE_OPERAND (incr, 1))
10607 && !processing_template_decl)
10608 {
10609 tree t = TREE_OPERAND (TREE_OPERAND (incr, 1), 0);
10610 if (TREE_SIDE_EFFECTS (t)
10611 && t != decl
10612 && (TREE_CODE (t) != NOP_EXPR
10613 || TREE_OPERAND (t, 0) != decl))
10614 TREE_OPERAND (TREE_OPERAND (incr, 1), 0)
10615 = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
10616
10617 t = TREE_OPERAND (TREE_OPERAND (incr, 1), 1);
10618 if (TREE_SIDE_EFFECTS (t)
10619 && t != decl
10620 && (TREE_CODE (t) != NOP_EXPR
10621 || TREE_OPERAND (t, 0) != decl))
10622 TREE_OPERAND (TREE_OPERAND (incr, 1), 1)
10623 = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
10624 }
10625
10626 if (orig_incr)
10627 TREE_VEC_ELT (OMP_FOR_INCR (omp_for), i) = TREE_VEC_ELT (orig_incr, i);
10628 }
10629 OMP_FOR_CLAUSES (omp_for) = clauses;
10630
10631 /* For simd loops with non-static data member iterators, we could have added
10632 OMP_CLAUSE_LINEAR clauses without OMP_CLAUSE_LINEAR_STEP. As we know the
10633 step at this point, fill it in. */
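/* For example (an illustrative sketch, assuming the loop variable n below
   is a non-static data member of the enclosing class):

     #pragma omp simd
     for (n = 0; n < 32; n += 4)
       ...

   the linear clause created for n is missing its step, and the code below
   fills in OMP_CLAUSE_LINEAR_STEP with 4.  */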
10634 if (code == OMP_SIMD && !processing_template_decl
10635 && TREE_VEC_LENGTH (OMP_FOR_INCR (omp_for)) == 1)
10636 for (tree c = omp_find_clause (clauses, OMP_CLAUSE_LINEAR); c;
10637 c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_LINEAR))
10638 if (OMP_CLAUSE_LINEAR_STEP (c) == NULL_TREE)
10639 {
10640 decl = TREE_OPERAND (TREE_VEC_ELT (OMP_FOR_INIT (omp_for), 0), 0);
10641 gcc_assert (decl == OMP_CLAUSE_DECL (c));
10642 incr = TREE_VEC_ELT (OMP_FOR_INCR (omp_for), 0);
10643 tree step, stept;
10644 switch (TREE_CODE (incr))
10645 {
10646 case PREINCREMENT_EXPR:
10647 case POSTINCREMENT_EXPR:
10648 /* c_omp_for_incr_canonicalize_ptr() should have been
10649 called to massage things appropriately. */
10650 gcc_assert (!INDIRECT_TYPE_P (TREE_TYPE (decl)));
10651 OMP_CLAUSE_LINEAR_STEP (c) = build_int_cst (TREE_TYPE (decl), 1);
10652 break;
10653 case PREDECREMENT_EXPR:
10654 case POSTDECREMENT_EXPR:
10655 /* c_omp_for_incr_canonicalize_ptr() should have been
10656 called to massage things appropriately. */
10657 gcc_assert (!INDIRECT_TYPE_P (TREE_TYPE (decl)));
10658 OMP_CLAUSE_LINEAR_STEP (c)
10659 = build_int_cst (TREE_TYPE (decl), -1);
10660 break;
10661 case MODIFY_EXPR:
10662 gcc_assert (TREE_OPERAND (incr, 0) == decl);
10663 incr = TREE_OPERAND (incr, 1);
10664 switch (TREE_CODE (incr))
10665 {
10666 case PLUS_EXPR:
10667 if (TREE_OPERAND (incr, 1) == decl)
10668 step = TREE_OPERAND (incr, 0);
10669 else
10670 step = TREE_OPERAND (incr, 1);
10671 break;
10672 case MINUS_EXPR:
10673 case POINTER_PLUS_EXPR:
10674 gcc_assert (TREE_OPERAND (incr, 0) == decl);
10675 step = TREE_OPERAND (incr, 1);
10676 break;
10677 default:
10678 gcc_unreachable ();
10679 }
10680 stept = TREE_TYPE (decl);
10681 if (INDIRECT_TYPE_P (stept))
10682 stept = sizetype;
10683 step = fold_convert (stept, step);
10684 if (TREE_CODE (incr) == MINUS_EXPR)
10685 step = fold_build1 (NEGATE_EXPR, stept, step);
10686 OMP_CLAUSE_LINEAR_STEP (c) = step;
10687 break;
10688 default:
10689 gcc_unreachable ();
10690 }
10691 }
10692   /* Override saved methods on OMP_LOOP's OMP_CLAUSE_LASTPRIVATE_LOOP_IV
10693      clauses; those need a copy ctor rather than the default ctor, plus,
10694      as for other lastprivates, an assignment op and a dtor.  */
10695 if (code == OMP_LOOP && !processing_template_decl)
10696 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
10697 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
10698 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)
10699 && cxx_omp_create_clause_info (c, TREE_TYPE (OMP_CLAUSE_DECL (c)),
10700 false, true, true, true))
10701 CP_OMP_CLAUSE_INFO (c) = NULL_TREE;
10702
10703 return omp_for;
10704 }
10705
10706 /* Fix up range for decls. Those decls were pushed into BIND's BIND_EXPR_VARS
10707 and need to be moved into the BIND_EXPR inside of the OMP_FOR's body. */
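/* For example (a sketch), with a range-based for underneath an OpenMP loop
   directive:

     #pragma omp for
     for (auto &x : vec)
       ...

   the compiler-generated range temporaries (e.g. __for_range, __for_begin,
   __for_end) were pushed into BIND and are relocated by this function into
   a BIND_EXPR wrapped around the loop body.  */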
10708
10709 tree
10710 finish_omp_for_block (tree bind, tree omp_for)
10711 {
10712 if (omp_for == NULL_TREE
10713 || !OMP_FOR_ORIG_DECLS (omp_for)
10714 || bind == NULL_TREE
10715 || TREE_CODE (bind) != BIND_EXPR)
10716 return bind;
10717 tree b = NULL_TREE;
10718 for (int i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (omp_for)); i++)
10719 if (TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (omp_for), i)) == TREE_LIST
10720 && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (omp_for), i)))
10721 {
10722 tree v = TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (omp_for), i));
10723 gcc_assert (BIND_EXPR_BLOCK (bind)
10724 && (BIND_EXPR_VARS (bind)
10725 == BLOCK_VARS (BIND_EXPR_BLOCK (bind))));
10726 for (int j = 2; j < TREE_VEC_LENGTH (v); j++)
10727 for (tree *p = &BIND_EXPR_VARS (bind); *p; p = &DECL_CHAIN (*p))
10728 {
10729 if (*p == TREE_VEC_ELT (v, j))
10730 {
10731 tree var = *p;
10732 *p = DECL_CHAIN (*p);
10733 if (b == NULL_TREE)
10734 {
10735 b = make_node (BLOCK);
10736 b = build3 (BIND_EXPR, void_type_node, NULL_TREE,
10737 OMP_FOR_BODY (omp_for), b);
10738 TREE_SIDE_EFFECTS (b) = 1;
10739 OMP_FOR_BODY (omp_for) = b;
10740 }
10741 DECL_CHAIN (var) = BIND_EXPR_VARS (b);
10742 BIND_EXPR_VARS (b) = var;
10743 BLOCK_VARS (BIND_EXPR_BLOCK (b)) = var;
10744 }
10745 }
10746 BLOCK_VARS (BIND_EXPR_BLOCK (bind)) = BIND_EXPR_VARS (bind);
10747 }
10748 return bind;
10749 }
10750
10751 void
10752 finish_omp_atomic (location_t loc, enum tree_code code, enum tree_code opcode,
10753 tree lhs, tree rhs, tree v, tree lhs1, tree rhs1, tree r,
10754 tree clauses, enum omp_memory_order mo, bool weak)
10755 {
10756 tree orig_lhs;
10757 tree orig_rhs;
10758 tree orig_v;
10759 tree orig_lhs1;
10760 tree orig_rhs1;
10761 tree orig_r;
10762 bool dependent_p;
10763 tree stmt;
10764
10765 orig_lhs = lhs;
10766 orig_rhs = rhs;
10767 orig_v = v;
10768 orig_lhs1 = lhs1;
10769 orig_rhs1 = rhs1;
10770 orig_r = r;
10771 dependent_p = false;
10772 stmt = NULL_TREE;
10773
10774 /* Even in a template, we can detect invalid uses of the atomic
10775 pragma if neither LHS nor RHS is type-dependent. */
10776 if (processing_template_decl)
10777 {
10778 dependent_p = (type_dependent_expression_p (lhs)
10779 || (rhs && type_dependent_expression_p (rhs))
10780 || (v && type_dependent_expression_p (v))
10781 || (lhs1 && type_dependent_expression_p (lhs1))
10782 || (rhs1 && type_dependent_expression_p (rhs1))
10783 || (r
10784 && r != void_list_node
10785 && type_dependent_expression_p (r)));
10786 if (clauses)
10787 {
10788 gcc_assert (TREE_CODE (clauses) == OMP_CLAUSE
10789 && OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_HINT
10790 && OMP_CLAUSE_CHAIN (clauses) == NULL_TREE);
10791 if (type_dependent_expression_p (OMP_CLAUSE_HINT_EXPR (clauses))
10792 || TREE_CODE (OMP_CLAUSE_HINT_EXPR (clauses)) != INTEGER_CST)
10793 dependent_p = true;
10794 }
10795 if (!dependent_p)
10796 {
10797 lhs = build_non_dependent_expr (lhs);
10798 if (rhs)
10799 rhs = build_non_dependent_expr (rhs);
10800 if (v)
10801 v = build_non_dependent_expr (v);
10802 if (lhs1)
10803 lhs1 = build_non_dependent_expr (lhs1);
10804 if (rhs1)
10805 rhs1 = build_non_dependent_expr (rhs1);
10806 if (r && r != void_list_node)
10807 r = build_non_dependent_expr (r);
10808 }
10809 }
10810 if (!dependent_p)
10811 {
10812 bool swapped = false;
10813 if (rhs1 && opcode != COND_EXPR && cp_tree_equal (lhs, rhs))
10814 {
10815 std::swap (rhs, rhs1);
10816 swapped = !commutative_tree_code (opcode);
10817 }
10818 if (rhs1 && opcode != COND_EXPR && !cp_tree_equal (lhs, rhs1))
10819 {
10820 if (code == OMP_ATOMIC)
10821 error ("%<#pragma omp atomic update%> uses two different "
10822 "expressions for memory");
10823 else
10824 error ("%<#pragma omp atomic capture%> uses two different "
10825 "expressions for memory");
10826 return;
10827 }
10828 if (lhs1 && !cp_tree_equal (lhs, lhs1))
10829 {
10830 if (code == OMP_ATOMIC)
10831 error ("%<#pragma omp atomic update%> uses two different "
10832 "expressions for memory");
10833 else
10834 error ("%<#pragma omp atomic capture%> uses two different "
10835 "expressions for memory");
10836 return;
10837 }
10838 stmt = c_finish_omp_atomic (loc, code, opcode, lhs, rhs,
10839 v, lhs1, rhs1, r, swapped, mo, weak,
10840 processing_template_decl != 0);
10841 if (stmt == error_mark_node)
10842 return;
10843 }
10844 if (processing_template_decl)
10845 {
10846 if (code == OMP_ATOMIC_READ)
10847 {
10848 stmt = build_min_nt_loc (loc, OMP_ATOMIC_READ, orig_lhs);
10849 OMP_ATOMIC_MEMORY_ORDER (stmt) = mo;
10850 stmt = build2 (MODIFY_EXPR, void_type_node, orig_v, stmt);
10851 }
10852 else
10853 {
10854 if (opcode == NOP_EXPR)
10855 stmt = build2 (MODIFY_EXPR, void_type_node, orig_lhs, orig_rhs);
10856 else if (opcode == COND_EXPR)
10857 {
10858 stmt = build2 (EQ_EXPR, boolean_type_node, orig_lhs, orig_rhs);
10859 if (orig_r)
10860 stmt = build2 (MODIFY_EXPR, boolean_type_node, orig_r,
10861 stmt);
10862 stmt = build3 (COND_EXPR, void_type_node, stmt, orig_rhs1,
10863 orig_lhs);
10864 orig_rhs1 = NULL_TREE;
10865 }
10866 else
10867 stmt = build2 (opcode, void_type_node, orig_lhs, orig_rhs);
10868 if (orig_rhs1)
10869 stmt = build_min_nt_loc (EXPR_LOCATION (orig_rhs1),
10870 COMPOUND_EXPR, orig_rhs1, stmt);
10871 if (code != OMP_ATOMIC)
10872 {
10873 stmt = build_min_nt_loc (loc, code, orig_lhs1, stmt);
10874 OMP_ATOMIC_MEMORY_ORDER (stmt) = mo;
10875 OMP_ATOMIC_WEAK (stmt) = weak;
10876 stmt = build2 (MODIFY_EXPR, void_type_node, orig_v, stmt);
10877 }
10878 }
10879 stmt = build2 (OMP_ATOMIC, void_type_node,
10880 clauses ? clauses : integer_zero_node, stmt);
10881 OMP_ATOMIC_MEMORY_ORDER (stmt) = mo;
10882 OMP_ATOMIC_WEAK (stmt) = weak;
10883 SET_EXPR_LOCATION (stmt, loc);
10884 }
10885
10886   /* Avoid -Wunused-value warnings here: the whole construct has side effects,
10887      and even if fold-const.cc or c-omp.cc wraps it in some tree that appears
10888      to be unused, the value is not unused.  */
10889 warning_sentinel w (warn_unused_value);
10890 finish_expr_stmt (stmt);
10891 }
10892
10893 void
10894 finish_omp_barrier (void)
10895 {
10896 tree fn = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER);
10897 releasing_vec vec;
10898 tree stmt = finish_call_expr (fn, &vec, false, false, tf_warning_or_error);
10899 finish_expr_stmt (stmt);
10900 }
10901
10902 void
10903 finish_omp_depobj (location_t loc, tree depobj,
10904 enum omp_clause_depend_kind kind, tree clause)
10905 {
10906 if (!error_operand_p (depobj) && !type_dependent_expression_p (depobj))
10907 {
10908 if (!lvalue_p (depobj))
10909 {
10910 error_at (EXPR_LOC_OR_LOC (depobj, loc),
10911 "%<depobj%> expression is not lvalue expression");
10912 depobj = error_mark_node;
10913 }
10914 }
10915
10916 if (processing_template_decl)
10917 {
10918 if (clause == NULL_TREE)
10919 clause = build_int_cst (integer_type_node, kind);
10920 add_stmt (build_min_nt_loc (loc, OMP_DEPOBJ, depobj, clause));
10921 return;
10922 }
10923
10924 if (!error_operand_p (depobj))
10925 {
10926 tree addr = cp_build_addr_expr (depobj, tf_warning_or_error);
10927 if (addr == error_mark_node)
10928 depobj = error_mark_node;
10929 else
10930 depobj = cp_build_indirect_ref (loc, addr, RO_UNARY_STAR,
10931 tf_warning_or_error);
10932 }
10933
10934 c_finish_omp_depobj (loc, depobj, kind, clause);
10935 }
10936
10937 void
10938 finish_omp_flush (int mo)
10939 {
10940 tree fn = builtin_decl_explicit (BUILT_IN_SYNC_SYNCHRONIZE);
10941 releasing_vec vec;
10942 if (mo != MEMMODEL_LAST && mo != MEMMODEL_SEQ_CST)
10943 {
10944 fn = builtin_decl_explicit (BUILT_IN_ATOMIC_THREAD_FENCE);
10945 vec->quick_push (build_int_cst (integer_type_node, mo));
10946 }
10947 tree stmt = finish_call_expr (fn, &vec, false, false, tf_warning_or_error);
10948 finish_expr_stmt (stmt);
10949 }
10950
10951 void
10952 finish_omp_taskwait (void)
10953 {
10954 tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKWAIT);
10955 releasing_vec vec;
10956 tree stmt = finish_call_expr (fn, &vec, false, false, tf_warning_or_error);
10957 finish_expr_stmt (stmt);
10958 }
10959
10960 void
10961 finish_omp_taskyield (void)
10962 {
10963 tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKYIELD);
10964 releasing_vec vec;
10965 tree stmt = finish_call_expr (fn, &vec, false, false, tf_warning_or_error);
10966 finish_expr_stmt (stmt);
10967 }
10968
10969 void
10970 finish_omp_cancel (tree clauses)
10971 {
10972 tree fn = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
10973 int mask = 0;
10974 if (omp_find_clause (clauses, OMP_CLAUSE_PARALLEL))
10975 mask = 1;
10976 else if (omp_find_clause (clauses, OMP_CLAUSE_FOR))
10977 mask = 2;
10978 else if (omp_find_clause (clauses, OMP_CLAUSE_SECTIONS))
10979 mask = 4;
10980 else if (omp_find_clause (clauses, OMP_CLAUSE_TASKGROUP))
10981 mask = 8;
10982 else
10983 {
10984 error ("%<#pragma omp cancel%> must specify one of "
10985 "%<parallel%>, %<for%>, %<sections%> or %<taskgroup%> clauses");
10986 return;
10987 }
10988 releasing_vec vec;
10989 tree ifc = omp_find_clause (clauses, OMP_CLAUSE_IF);
10990 if (ifc != NULL_TREE)
10991 {
10992 if (OMP_CLAUSE_IF_MODIFIER (ifc) != ERROR_MARK
10993 && OMP_CLAUSE_IF_MODIFIER (ifc) != VOID_CST)
10994 error_at (OMP_CLAUSE_LOCATION (ifc),
10995 "expected %<cancel%> %<if%> clause modifier");
10996 else
10997 {
10998 tree ifc2 = omp_find_clause (OMP_CLAUSE_CHAIN (ifc), OMP_CLAUSE_IF);
10999 if (ifc2 != NULL_TREE)
11000 {
11001 gcc_assert (OMP_CLAUSE_IF_MODIFIER (ifc) == VOID_CST
11002 && OMP_CLAUSE_IF_MODIFIER (ifc2) != ERROR_MARK
11003 && OMP_CLAUSE_IF_MODIFIER (ifc2) != VOID_CST);
11004 error_at (OMP_CLAUSE_LOCATION (ifc2),
11005 "expected %<cancel%> %<if%> clause modifier");
11006 }
11007 }
11008
11009 if (!processing_template_decl)
11010 ifc = maybe_convert_cond (OMP_CLAUSE_IF_EXPR (ifc));
11011 else
11012 ifc = build_x_binary_op (OMP_CLAUSE_LOCATION (ifc), NE_EXPR,
11013 OMP_CLAUSE_IF_EXPR (ifc), ERROR_MARK,
11014 integer_zero_node, ERROR_MARK,
11015 NULL_TREE, NULL, tf_warning_or_error);
11016 }
11017 else
11018 ifc = boolean_true_node;
11019 vec->quick_push (build_int_cst (integer_type_node, mask));
11020 vec->quick_push (ifc);
11021 tree stmt = finish_call_expr (fn, &vec, false, false, tf_warning_or_error);
11022 finish_expr_stmt (stmt);
11023 }
11024
11025 void
11026 finish_omp_cancellation_point (tree clauses)
11027 {
11028 tree fn = builtin_decl_explicit (BUILT_IN_GOMP_CANCELLATION_POINT);
11029 int mask = 0;
11030 if (omp_find_clause (clauses, OMP_CLAUSE_PARALLEL))
11031 mask = 1;
11032 else if (omp_find_clause (clauses, OMP_CLAUSE_FOR))
11033 mask = 2;
11034 else if (omp_find_clause (clauses, OMP_CLAUSE_SECTIONS))
11035 mask = 4;
11036 else if (omp_find_clause (clauses, OMP_CLAUSE_TASKGROUP))
11037 mask = 8;
11038 else
11039 {
11040 error ("%<#pragma omp cancellation point%> must specify one of "
11041 "%<parallel%>, %<for%>, %<sections%> or %<taskgroup%> clauses");
11042 return;
11043 }
11044 releasing_vec vec
11045 = make_tree_vector_single (build_int_cst (integer_type_node, mask));
11046 tree stmt = finish_call_expr (fn, &vec, false, false, tf_warning_or_error);
11047 finish_expr_stmt (stmt);
11048 }
11049
11050 /* Begin a __transaction_atomic or __transaction_relaxed statement.
11051 If PCOMPOUND is non-null, this is for a function-transaction-block, and we
11052 should create an extra compound stmt. */
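/* For example (a sketch, assuming -fgnu-tm):

     __transaction_atomic { ++x; }

   is parsed via begin_transaction_stmt/finish_transaction_stmt; without
   transactional memory support enabled the error below is emitted.  */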
11053
11054 tree
11055 begin_transaction_stmt (location_t loc, tree *pcompound, int flags)
11056 {
11057 tree r;
11058
11059 if (pcompound)
11060 *pcompound = begin_compound_stmt (0);
11061
11062 r = build_stmt (loc, TRANSACTION_EXPR, NULL_TREE);
11063
11064   /* Only add the statement to the function if support is enabled.  */
11065 if (flag_tm)
11066 add_stmt (r);
11067 else
11068 error_at (loc, ((flags & TM_STMT_ATTR_RELAXED) != 0
11069 ? G_("%<__transaction_relaxed%> without "
11070 "transactional memory support enabled")
11071 : G_("%<__transaction_atomic%> without "
11072 "transactional memory support enabled")));
11073
11074 TRANSACTION_EXPR_BODY (r) = push_stmt_list ();
11075 TREE_SIDE_EFFECTS (r) = 1;
11076 return r;
11077 }
11078
11079 /* End a __transaction_atomic or __transaction_relaxed statement.
11080 If COMPOUND_STMT is non-null, this is for a function-transaction-block,
11081 and we should end the compound. If NOEX is non-NULL, we wrap the body in
11082 a MUST_NOT_THROW_EXPR with NOEX as condition. */
11083
11084 void
11085 finish_transaction_stmt (tree stmt, tree compound_stmt, int flags, tree noex)
11086 {
11087 TRANSACTION_EXPR_BODY (stmt) = pop_stmt_list (TRANSACTION_EXPR_BODY (stmt));
11088 TRANSACTION_EXPR_OUTER (stmt) = (flags & TM_STMT_ATTR_OUTER) != 0;
11089 TRANSACTION_EXPR_RELAXED (stmt) = (flags & TM_STMT_ATTR_RELAXED) != 0;
11090 TRANSACTION_EXPR_IS_STMT (stmt) = 1;
11091
11092 /* noexcept specifications are not allowed for function transactions. */
11093 gcc_assert (!(noex && compound_stmt));
11094 if (noex)
11095 {
11096 tree body = build_must_not_throw_expr (TRANSACTION_EXPR_BODY (stmt),
11097 noex);
11098 protected_set_expr_location
11099 (body, EXPR_LOCATION (TRANSACTION_EXPR_BODY (stmt)));
11100 TREE_SIDE_EFFECTS (body) = 1;
11101 TRANSACTION_EXPR_BODY (stmt) = body;
11102 }
11103
11104 if (compound_stmt)
11105 finish_compound_stmt (compound_stmt);
11106 }
11107
11108 /* Build a __transaction_atomic or __transaction_relaxed expression. If
11109 NOEX is non-NULL, we wrap the body in a MUST_NOT_THROW_EXPR with NOEX as
11110 condition. */
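/* For example (a sketch of the expression form):

     int i = __transaction_atomic (x + 1);

   produces a TRANSACTION_EXPR wrapping the operand expression.  */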
11111
11112 tree
11113 build_transaction_expr (location_t loc, tree expr, int flags, tree noex)
11114 {
11115 tree ret;
11116 if (noex)
11117 {
11118 expr = build_must_not_throw_expr (expr, noex);
11119 protected_set_expr_location (expr, loc);
11120 TREE_SIDE_EFFECTS (expr) = 1;
11121 }
11122 ret = build1 (TRANSACTION_EXPR, TREE_TYPE (expr), expr);
11123 if (flags & TM_STMT_ATTR_RELAXED)
11124 TRANSACTION_EXPR_RELAXED (ret) = 1;
11125 TREE_SIDE_EFFECTS (ret) = 1;
11126 SET_EXPR_LOCATION (ret, loc);
11127 return ret;
11128 }
11129
11130 void
11131 init_cp_semantics (void)
11132 {
11133 }
11134
11135
11136 /* If we have a condition in conjunctive normal form (CNF), find the first
11137 failing clause. In other words, given an expression like
11138
11139 true && true && false && true && false
11140
11141 return the first 'false'. EXPR is the expression. */
11142
11143 static tree
11144 find_failing_clause_r (tree expr)
11145 {
11146 if (TREE_CODE (expr) == TRUTH_ANDIF_EXPR)
11147 {
11148 /* First check the left side... */
11149 tree e = find_failing_clause_r (TREE_OPERAND (expr, 0));
11150 if (e == NULL_TREE)
11151 /* ...if we didn't find a false clause, check the right side. */
11152 e = find_failing_clause_r (TREE_OPERAND (expr, 1));
11153 return e;
11154 }
11155 tree e = contextual_conv_bool (expr, tf_none);
11156 e = fold_non_dependent_expr (e, tf_none, /*manifestly_const_eval=*/true);
11157 if (integer_zerop (e))
11158 /* This is the failing clause. */
11159 return expr;
11160 return NULL_TREE;
11161 }
11162
11163 /* Wrapper for find_failing_clause_r. */
11164
11165 static tree
11166 find_failing_clause (tree expr)
11167 {
11168 if (TREE_CODE (expr) == TRUTH_ANDIF_EXPR)
11169 if (tree e = find_failing_clause_r (expr))
11170 expr = e;
11171 return expr;
11172 }
11173
11174 /* Build a STATIC_ASSERT for a static assertion with the condition
11175 CONDITION and the message text MESSAGE. LOCATION is the location
11176 of the static assertion in the source code. When MEMBER_P, this
11177 static assertion is a member of a class. If SHOW_EXPR_P is true,
11178 print the condition (because it was instantiation-dependent). */
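/* For example, for

     static_assert (sizeof (long) >= 8, "need a 64-bit long");

   the condition is folded to a constant below and, if false, the message
   (when non-empty) is included in the error.  If the condition is
   instantiation-dependent, a STATIC_ASSERT node is built instead and
   re-checked at instantiation time.  */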
11179
11180 void
11181 finish_static_assert (tree condition, tree message, location_t location,
11182 bool member_p, bool show_expr_p)
11183 {
11184 tsubst_flags_t complain = tf_warning_or_error;
11185
11186 if (message == NULL_TREE
11187 || message == error_mark_node
11188 || condition == NULL_TREE
11189 || condition == error_mark_node)
11190 return;
11191
11192 if (check_for_bare_parameter_packs (condition))
11193 condition = error_mark_node;
11194
11195 if (instantiation_dependent_expression_p (condition))
11196 {
11197 /* We're in a template; build a STATIC_ASSERT and put it in
11198 the right place. */
11199 tree assertion;
11200
11201 assertion = make_node (STATIC_ASSERT);
11202 STATIC_ASSERT_CONDITION (assertion) = condition;
11203 STATIC_ASSERT_MESSAGE (assertion) = message;
11204 STATIC_ASSERT_SOURCE_LOCATION (assertion) = location;
11205
11206 if (member_p)
11207 maybe_add_class_template_decl_list (current_class_type,
11208 assertion,
11209 /*friend_p=*/0);
11210 else
11211 add_stmt (assertion);
11212
11213 return;
11214 }
11215
11216 /* Save the condition in case it was a concept check. */
11217 tree orig_condition = condition;
11218
11219 /* Fold the expression and convert it to a boolean value. */
11220 condition = contextual_conv_bool (condition, complain);
11221 condition = fold_non_dependent_expr (condition, complain,
11222 /*manifestly_const_eval=*/true);
11223
11224 if (TREE_CODE (condition) == INTEGER_CST && !integer_zerop (condition))
11225 /* Do nothing; the condition is satisfied. */
11226 ;
11227 else
11228 {
11229 iloc_sentinel ils (location);
11230
11231 if (integer_zerop (condition))
11232 {
11233 int sz = TREE_INT_CST_LOW (TYPE_SIZE_UNIT
11234 (TREE_TYPE (TREE_TYPE (message))));
11235 int len = TREE_STRING_LENGTH (message) / sz - 1;
11236
11237 /* See if we can find which clause was failing (for logical AND). */
11238 tree bad = find_failing_clause (orig_condition);
11239 /* If not, or its location is unusable, fall back to the previous
11240 location. */
11241 location_t cloc = cp_expr_loc_or_loc (bad, location);
11242 /* Nobody wants to see the artificial (bool) cast. */
11243 bad = tree_strip_nop_conversions (bad);
11244
11245 /* Report the error. */
11246 if (len == 0)
11247 error_at (cloc, "static assertion failed");
11248 else
11249 error_at (cloc, "static assertion failed: %s",
11250 TREE_STRING_POINTER (message));
11251
11252 /* Actually explain the failure if this is a concept check or a
11253 requires-expression. */
11254 if (concept_check_p (bad)
11255 || TREE_CODE (bad) == REQUIRES_EXPR)
11256 diagnose_constraints (location, bad, NULL_TREE);
11257 else if (COMPARISON_CLASS_P (bad)
11258 && ARITHMETIC_TYPE_P (TREE_TYPE (TREE_OPERAND (bad, 0))))
11259 {
11260 tree op0 = fold_non_dependent_expr (TREE_OPERAND (bad, 0));
11261 tree op1 = fold_non_dependent_expr (TREE_OPERAND (bad, 1));
11262 tree cond = build2 (TREE_CODE (bad), boolean_type_node, op0, op1);
11263 inform (cloc, "the comparison reduces to %qE", cond);
11264 }
11265 else if (show_expr_p)
11266 inform (cloc, "%qE evaluates to false", bad);
11267 }
11268 else if (condition && condition != error_mark_node)
11269 {
11270 error ("non-constant condition for static assertion");
11271 if (require_rvalue_constant_expression (condition))
11272 cxx_constant_value (condition);
11273 }
11274 }
11275 }
11276
11277 /* Implements the C++0x decltype keyword. Returns the type of EXPR,
11278 suitable for use as a type-specifier.
11279
11280 ID_EXPRESSION_OR_MEMBER_ACCESS_P is true when EXPR was parsed as an
11281 id-expression or a class member access, FALSE when it was parsed as
11282 a full expression. */
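/* For example, given  int x;  decltype(x) denotes int (an id-expression,
   so ID_EXPRESSION_OR_MEMBER_ACCESS_P is true), whereas decltype((x))
   denotes int& because the parenthesized operand is treated as an
   ordinary lvalue expression.  */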
11283
11284 tree
11285 finish_decltype_type (tree expr, bool id_expression_or_member_access_p,
11286 tsubst_flags_t complain)
11287 {
11288 tree type = NULL_TREE;
11289
11290 if (!expr || error_operand_p (expr))
11291 return error_mark_node;
11292
11293 if (TYPE_P (expr)
11294 || TREE_CODE (expr) == TYPE_DECL
11295 || (TREE_CODE (expr) == BIT_NOT_EXPR
11296 && TYPE_P (TREE_OPERAND (expr, 0))))
11297 {
11298 if (complain & tf_error)
11299 error ("argument to %<decltype%> must be an expression");
11300 return error_mark_node;
11301 }
11302
11303 /* decltype is an unevaluated context. */
11304 cp_unevaluated u;
11305
11306 processing_template_decl_sentinel ptds (/*reset=*/false);
11307
11308 /* Depending on the resolution of DR 1172, we may later need to distinguish
11309 instantiation-dependent but not type-dependent expressions so that, say,
11310 A<decltype(sizeof(T))>::U doesn't require 'typename'. */
11311 if (instantiation_dependent_uneval_expression_p (expr))
11312 {
11313 type = cxx_make_type (DECLTYPE_TYPE);
11314 DECLTYPE_TYPE_EXPR (type) = expr;
11315 DECLTYPE_TYPE_ID_EXPR_OR_MEMBER_ACCESS_P (type)
11316 = id_expression_or_member_access_p;
11317 SET_TYPE_STRUCTURAL_EQUALITY (type);
11318
11319 return type;
11320 }
11321 else if (processing_template_decl)
11322 {
11323 expr = instantiate_non_dependent_expr_sfinae (expr, complain|tf_decltype);
11324 if (expr == error_mark_node)
11325 return error_mark_node;
11326 /* Keep processing_template_decl cleared for the rest of the function
11327 (for sake of the call to lvalue_kind below, which handles templated
11328 and non-templated COND_EXPR differently). */
11329 processing_template_decl = 0;
11330 }
11331
11332 /* The type denoted by decltype(e) is defined as follows: */
11333
11334 expr = resolve_nondeduced_context (expr, complain);
11335 if (!mark_single_function (expr, complain))
11336 return error_mark_node;
11337
11338 if (invalid_nonstatic_memfn_p (input_location, expr, complain))
11339 return error_mark_node;
11340
11341 if (type_unknown_p (expr))
11342 {
11343 if (complain & tf_error)
11344 error ("%<decltype%> cannot resolve address of overloaded function");
11345 return error_mark_node;
11346 }
11347
11348 /* To get the size of a static data member declared as an array of
11349 unknown bound, we need to instantiate it. */
11350 if (VAR_P (expr)
11351 && VAR_HAD_UNKNOWN_BOUND (expr)
11352 && DECL_TEMPLATE_INSTANTIATION (expr))
11353 instantiate_decl (expr, /*defer_ok*/true, /*expl_inst_mem*/false);
11354
11355 if (id_expression_or_member_access_p)
11356 {
11357 /* If e is an id-expression or a class member access (5.2.5
11358 [expr.ref]), decltype(e) is defined as the type of the entity
11359 named by e. If there is no such entity, or e names a set of
11360 overloaded functions, the program is ill-formed. */
11361 if (identifier_p (expr))
11362 expr = lookup_name (expr);
11363
11364 if (INDIRECT_REF_P (expr)
11365 || TREE_CODE (expr) == VIEW_CONVERT_EXPR)
11366 /* This can happen when the expression is, e.g., "a.b". Just
11367 look at the underlying operand. */
11368 expr = TREE_OPERAND (expr, 0);
11369
11370 if (TREE_CODE (expr) == OFFSET_REF
11371 || TREE_CODE (expr) == MEMBER_REF
11372 || TREE_CODE (expr) == SCOPE_REF)
11373 /* We're only interested in the field itself. If it is a
11374 BASELINK, we will need to see through it in the next
11375 step. */
11376 expr = TREE_OPERAND (expr, 1);
11377
11378 if (BASELINK_P (expr))
11379 /* See through BASELINK nodes to the underlying function. */
11380 expr = BASELINK_FUNCTIONS (expr);
11381
11382 /* decltype of a decomposition name drops references in the tuple case
11383 (unlike decltype of a normal variable) and keeps cv-qualifiers from
11384 the containing object in the other cases (unlike decltype of a member
11385 access expression). */
11386 if (DECL_DECOMPOSITION_P (expr))
11387 {
11388 if (DECL_HAS_VALUE_EXPR_P (expr))
11389 /* Expr is an array or struct subobject proxy, handle
11390 bit-fields properly. */
11391 return unlowered_expr_type (expr);
11392 else
11393 /* Expr is a reference variable for the tuple case. */
11394 return lookup_decomp_type (expr);
11395 }
11396
11397 switch (TREE_CODE (expr))
11398 {
11399 case FIELD_DECL:
11400 if (DECL_BIT_FIELD_TYPE (expr))
11401 {
11402 type = DECL_BIT_FIELD_TYPE (expr);
11403 break;
11404 }
11405 /* Fall through for fields that aren't bitfields. */
11406 gcc_fallthrough ();
11407
11408 case FUNCTION_DECL:
11409 case VAR_DECL:
11410 case CONST_DECL:
11411 case PARM_DECL:
11412 case RESULT_DECL:
11413 case TEMPLATE_PARM_INDEX:
11414 expr = mark_type_use (expr);
11415 type = TREE_TYPE (expr);
11416 break;
11417
11418 case ERROR_MARK:
11419 type = error_mark_node;
11420 break;
11421
11422 case COMPONENT_REF:
11423 case COMPOUND_EXPR:
11424 mark_type_use (expr);
11425 type = is_bitfield_expr_with_lowered_type (expr);
11426 if (!type)
11427 type = TREE_TYPE (TREE_OPERAND (expr, 1));
11428 break;
11429
11430 case BIT_FIELD_REF:
11431 gcc_unreachable ();
11432
11433 case INTEGER_CST:
11434 case PTRMEM_CST:
11435 /* We can get here when the id-expression refers to an
11436 enumerator or non-type template parameter. */
11437 type = TREE_TYPE (expr);
11438 break;
11439
11440 default:
11441 /* Handle instantiated template non-type arguments. */
11442 type = TREE_TYPE (expr);
11443 break;
11444 }
11445 }
11446 else
11447 {
11448 /* Within a lambda-expression:
11449
11450 Every occurrence of decltype((x)) where x is a possibly
11451 parenthesized id-expression that names an entity of
11452 automatic storage duration is treated as if x were
11453 transformed into an access to a corresponding data member
11454 of the closure type that would have been declared if x
11455 were a use of the denoted entity. */
11456 if (outer_automatic_var_p (expr)
11457 && current_function_decl
11458 && LAMBDA_FUNCTION_P (current_function_decl))
11459 type = capture_decltype (expr);
11460 else if (error_operand_p (expr))
11461 type = error_mark_node;
11462 else if (expr == current_class_ptr)
11463 /* If the expression is just "this", we want the
11464 cv-unqualified pointer for the "this" type. */
11465 type = TYPE_MAIN_VARIANT (TREE_TYPE (expr));
11466 else
11467 {
11468 /* Otherwise, where T is the type of e, if e is an lvalue,
11469 decltype(e) is defined as T&; if an xvalue, T&&; otherwise, T. */
11470 cp_lvalue_kind clk = lvalue_kind (expr);
11471 type = unlowered_expr_type (expr);
11472 gcc_assert (!TYPE_REF_P (type));
11473
11474 /* For vector types, pick a non-opaque variant. */
11475 if (VECTOR_TYPE_P (type))
11476 type = strip_typedefs (type);
11477
11478 if (clk != clk_none && !(clk & clk_class))
11479 type = cp_build_reference_type (type, (clk & clk_rvalueref));
11480 }
11481 }
11482
11483 return type;
11484 }
11485
11486 /* Called from trait_expr_value to evaluate either __has_nothrow_assign or
11487 __has_nothrow_copy, depending on assign_p. Returns true iff all
11488 the copy {ctor,assign} fns are nothrow. */
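/* For example (a sketch), given

     struct S { S &operator= (const S &) noexcept; };

   __has_nothrow_assign (S) is true because the only copy assignment
   operator is declared noexcept.  */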
11489
11490 static bool
11491 classtype_has_nothrow_assign_or_copy_p (tree type, bool assign_p)
11492 {
11493 tree fns = NULL_TREE;
11494
11495 if (assign_p || TYPE_HAS_COPY_CTOR (type))
11496 fns = get_class_binding (type, assign_p ? assign_op_identifier
11497 : ctor_identifier);
11498
11499 bool saw_copy = false;
11500 for (ovl_iterator iter (fns); iter; ++iter)
11501 {
11502 tree fn = *iter;
11503
11504 if (copy_fn_p (fn) > 0)
11505 {
11506 saw_copy = true;
11507 if (!maybe_instantiate_noexcept (fn)
11508 || !TYPE_NOTHROW_P (TREE_TYPE (fn)))
11509 return false;
11510 }
11511 }
11512
11513 return saw_copy;
11514 }
11515
11516 /* Return true if BASE is a pointer-interconvertible base of DERIVED.  */
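/* For example, given the standard-layout types

     struct B { };
     struct D : B { int i; };

   B is a pointer-interconvertible base of D, so (B, D) yields true;
   a non-standard-layout DERIVED or an ambiguous base yields false.  */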
11517
11518 static bool
11519 pointer_interconvertible_base_of_p (tree base, tree derived)
11520 {
11521 if (base == error_mark_node || derived == error_mark_node)
11522 return false;
11523 base = TYPE_MAIN_VARIANT (base);
11524 derived = TYPE_MAIN_VARIANT (derived);
11525 if (!NON_UNION_CLASS_TYPE_P (base)
11526 || !NON_UNION_CLASS_TYPE_P (derived))
11527 return false;
11528
11529 if (same_type_p (base, derived))
11530 return true;
11531
11532 if (!std_layout_type_p (derived))
11533 return false;
11534
11535 return uniquely_derived_from_p (base, derived);
11536 }
11537
11538 /* Helper function for fold_builtin_is_pointer_inverconvertible_with_class.
11539    Return true if MEMBERTYPE is the type of the first non-static data member
11540    of TYPE, or, for unions, of any member.  */
11541 static bool
11542 first_nonstatic_data_member_p (tree type, tree membertype)
11543 {
11544 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
11545 {
11546 if (TREE_CODE (field) != FIELD_DECL)
11547 continue;
11548 if (DECL_FIELD_IS_BASE (field) && is_empty_field (field))
11549 continue;
11550 if (DECL_FIELD_IS_BASE (field))
11551 return first_nonstatic_data_member_p (TREE_TYPE (field), membertype);
11552 if (ANON_AGGR_TYPE_P (TREE_TYPE (field)))
11553 {
11554 if ((TREE_CODE (TREE_TYPE (field)) == UNION_TYPE
11555 || std_layout_type_p (TREE_TYPE (field)))
11556 && first_nonstatic_data_member_p (TREE_TYPE (field), membertype))
11557 return true;
11558 }
11559 else if (same_type_ignoring_top_level_qualifiers_p (TREE_TYPE (field),
11560 membertype))
11561 return true;
11562 if (TREE_CODE (type) != UNION_TYPE)
11563 return false;
11564 }
11565 return false;
11566 }
11567
11568 /* Fold __builtin_is_pointer_interconvertible_with_class call. */
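/* For example (a sketch), for a standard-layout struct S whose first
   non-static data member is m,

     __builtin_is_pointer_interconvertible_with_class (&S::m)

   folds to true, while a pointer to a later member folds to false.  */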
11569
11570 tree
11571 fold_builtin_is_pointer_inverconvertible_with_class (location_t loc, int nargs,
11572 tree *args)
11573 {
11574   /* Unless users call the builtin directly, the following 3 checks should be
11575      guaranteed by the std::is_pointer_interconvertible_with_class function
11576      template.  */
11577 if (nargs != 1)
11578 {
11579 error_at (loc, "%<__builtin_is_pointer_interconvertible_with_class%> "
11580 "needs a single argument");
11581 return boolean_false_node;
11582 }
11583 tree arg = args[0];
11584 if (error_operand_p (arg))
11585 return boolean_false_node;
11586 if (!TYPE_PTRMEM_P (TREE_TYPE (arg)))
11587 {
11588 error_at (loc, "%<__builtin_is_pointer_interconvertible_with_class%> "
11589 "argument is not pointer to member");
11590 return boolean_false_node;
11591 }
11592
11593 if (!TYPE_PTRDATAMEM_P (TREE_TYPE (arg)))
11594 return boolean_false_node;
11595
11596 tree membertype = TREE_TYPE (TREE_TYPE (arg));
11597 tree basetype = TYPE_OFFSET_BASETYPE (TREE_TYPE (arg));
11598 if (!complete_type_or_else (basetype, NULL_TREE))
11599 return boolean_false_node;
11600
11601 if (TREE_CODE (basetype) != UNION_TYPE
11602 && !std_layout_type_p (basetype))
11603 return boolean_false_node;
11604
11605 if (!first_nonstatic_data_member_p (basetype, membertype))
11606 return boolean_false_node;
11607
11608 if (TREE_CODE (arg) == PTRMEM_CST)
11609 arg = cplus_expand_constant (arg);
11610
11611 if (integer_nonzerop (arg))
11612 return boolean_false_node;
11613 if (integer_zerop (arg))
11614 return boolean_true_node;
11615
11616 return fold_build2 (EQ_EXPR, boolean_type_node, arg,
11617 build_zero_cst (TREE_TYPE (arg)));
11618 }
11619
11620 /* Helper function for is_corresponding_member_aggr. Return true if
11621 MEMBERTYPE pointer-to-data-member ARG can be found in anonymous
11622 union or structure BASETYPE. */
11623
11624 static bool
11625 is_corresponding_member_union (tree basetype, tree membertype, tree arg)
11626 {
11627 for (tree field = TYPE_FIELDS (basetype); field; field = DECL_CHAIN (field))
11628 if (TREE_CODE (field) != FIELD_DECL || DECL_BIT_FIELD_TYPE (field))
11629 continue;
11630 else if (same_type_ignoring_top_level_qualifiers_p (TREE_TYPE (field),
11631 membertype))
11632 {
11633 if (TREE_CODE (arg) != INTEGER_CST
11634 || tree_int_cst_equal (arg, byte_position (field)))
11635 return true;
11636 }
11637 else if (ANON_AGGR_TYPE_P (TREE_TYPE (field)))
11638 {
11639 tree narg = arg;
11640 if (TREE_CODE (basetype) != UNION_TYPE
11641 && TREE_CODE (narg) == INTEGER_CST)
11642 narg = size_binop (MINUS_EXPR, arg, byte_position (field));
11643 if (is_corresponding_member_union (TREE_TYPE (field),
11644 membertype, narg))
11645 return true;
11646 }
11647 return false;
11648 }
11649
11650 /* Helper function for fold_builtin_is_corresponding_member call.
11651 Return boolean_false_node if MEMBERTYPE1 BASETYPE1::*ARG1 and
11652 MEMBERTYPE2 BASETYPE2::*ARG2 aren't corresponding members,
11653 boolean_true_node if they are corresponding members, or for
11654 non-constant ARG2 the highest member offset for corresponding
11655 members. */
11656
11657 static tree
11658 is_corresponding_member_aggr (location_t loc, tree basetype1, tree membertype1,
11659 tree arg1, tree basetype2, tree membertype2,
11660 tree arg2)
11661 {
11662 tree field1 = TYPE_FIELDS (basetype1);
11663 tree field2 = TYPE_FIELDS (basetype2);
11664 tree ret = boolean_false_node;
11665 while (1)
11666 {
11667 bool r = next_common_initial_sequence (field1, field2);
11668 if (field1 == NULL_TREE || field2 == NULL_TREE)
11669 break;
11670 if (r
11671 && same_type_ignoring_top_level_qualifiers_p (TREE_TYPE (field1),
11672 membertype1)
11673 && same_type_ignoring_top_level_qualifiers_p (TREE_TYPE (field2),
11674 membertype2))
11675 {
11676 tree pos = byte_position (field1);
11677 if (TREE_CODE (arg1) == INTEGER_CST
11678 && tree_int_cst_equal (arg1, pos))
11679 {
11680 if (TREE_CODE (arg2) == INTEGER_CST)
11681 return boolean_true_node;
11682 return pos;
11683 }
11684 else if (TREE_CODE (arg1) != INTEGER_CST)
11685 ret = pos;
11686 }
11687 else if (ANON_AGGR_TYPE_P (TREE_TYPE (field1))
11688 && ANON_AGGR_TYPE_P (TREE_TYPE (field2)))
11689 {
11690 if ((!lookup_attribute ("no_unique_address",
11691 DECL_ATTRIBUTES (field1)))
11692 != !lookup_attribute ("no_unique_address",
11693 DECL_ATTRIBUTES (field2)))
11694 break;
11695 if (!tree_int_cst_equal (bit_position (field1),
11696 bit_position (field2)))
11697 break;
11698 bool overlap = true;
11699 tree pos = byte_position (field1);
11700 if (TREE_CODE (arg1) == INTEGER_CST)
11701 {
11702 tree off1 = fold_convert (sizetype, arg1);
11703 tree sz1 = TYPE_SIZE_UNIT (TREE_TYPE (field1));
11704 if (tree_int_cst_lt (off1, pos)
11705 || tree_int_cst_le (size_binop (PLUS_EXPR, pos, sz1), off1))
11706 overlap = false;
11707 }
11708 if (TREE_CODE (arg2) == INTEGER_CST)
11709 {
11710 tree off2 = fold_convert (sizetype, arg2);
11711 tree sz2 = TYPE_SIZE_UNIT (TREE_TYPE (field2));
11712 if (tree_int_cst_lt (off2, pos)
11713 || tree_int_cst_le (size_binop (PLUS_EXPR, pos, sz2), off2))
11714 overlap = false;
11715 }
11716 if (overlap
11717 && NON_UNION_CLASS_TYPE_P (TREE_TYPE (field1))
11718 && NON_UNION_CLASS_TYPE_P (TREE_TYPE (field2)))
11719 {
11720 tree narg1 = arg1;
11721 if (TREE_CODE (arg1) == INTEGER_CST)
11722 narg1 = size_binop (MINUS_EXPR,
11723 fold_convert (sizetype, arg1), pos);
11724 tree narg2 = arg2;
11725 if (TREE_CODE (arg2) == INTEGER_CST)
11726 narg2 = size_binop (MINUS_EXPR,
11727 fold_convert (sizetype, arg2), pos);
11728 tree t1 = TREE_TYPE (field1);
11729 tree t2 = TREE_TYPE (field2);
11730 tree nret = is_corresponding_member_aggr (loc, t1, membertype1,
11731 narg1, t2, membertype2,
11732 narg2);
11733 if (nret != boolean_false_node)
11734 {
11735 if (nret == boolean_true_node)
11736 return nret;
11737 if (TREE_CODE (arg1) == INTEGER_CST)
11738 return size_binop (PLUS_EXPR, nret, pos);
11739 ret = size_binop (PLUS_EXPR, nret, pos);
11740 }
11741 }
11742 else if (overlap
11743 && TREE_CODE (TREE_TYPE (field1)) == UNION_TYPE
11744 && TREE_CODE (TREE_TYPE (field2)) == UNION_TYPE)
11745 {
11746 tree narg1 = arg1;
11747 if (TREE_CODE (arg1) == INTEGER_CST)
11748 narg1 = size_binop (MINUS_EXPR,
11749 fold_convert (sizetype, arg1), pos);
11750 tree narg2 = arg2;
11751 if (TREE_CODE (arg2) == INTEGER_CST)
11752 narg2 = size_binop (MINUS_EXPR,
11753 fold_convert (sizetype, arg2), pos);
11754 if (is_corresponding_member_union (TREE_TYPE (field1),
11755 membertype1, narg1)
11756 && is_corresponding_member_union (TREE_TYPE (field2),
11757 membertype2, narg2))
11758 {
11759 sorry_at (loc, "%<__builtin_is_corresponding_member%> "
11760 "not well defined for anonymous unions");
11761 return boolean_false_node;
11762 }
11763 }
11764 }
11765 if (!r)
11766 break;
11767 field1 = DECL_CHAIN (field1);
11768 field2 = DECL_CHAIN (field2);
11769 }
11770 return ret;
11771 }
11772
11773 /* Fold __builtin_is_corresponding_member call. */
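/* For example (a sketch), given the standard-layout types

     struct A { int x; };
     struct B { int y; };

   __builtin_is_corresponding_member (&A::x, &B::y) folds to true, since
   x and y are corresponding members of a common initial sequence.  */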
11774
11775 tree
11776 fold_builtin_is_corresponding_member (location_t loc, int nargs,
11777 tree *args)
11778 {
11779   /* Unless users call the builtin directly, the following 3 checks should be
11780      guaranteed by the std::is_corresponding_member function template.  */
11781 if (nargs != 2)
11782 {
11783 error_at (loc, "%<__builtin_is_corresponding_member%> "
11784 "needs two arguments");
11785 return boolean_false_node;
11786 }
11787 tree arg1 = args[0];
11788 tree arg2 = args[1];
11789 if (error_operand_p (arg1) || error_operand_p (arg2))
11790 return boolean_false_node;
11791 if (!TYPE_PTRMEM_P (TREE_TYPE (arg1))
11792 || !TYPE_PTRMEM_P (TREE_TYPE (arg2)))
11793 {
11794 error_at (loc, "%<__builtin_is_corresponding_member%> "
11795 "argument is not pointer to member");
11796 return boolean_false_node;
11797 }
11798
11799 if (!TYPE_PTRDATAMEM_P (TREE_TYPE (arg1))
11800 || !TYPE_PTRDATAMEM_P (TREE_TYPE (arg2)))
11801 return boolean_false_node;
11802
11803 tree membertype1 = TREE_TYPE (TREE_TYPE (arg1));
11804 tree basetype1 = TYPE_OFFSET_BASETYPE (TREE_TYPE (arg1));
11805 if (!complete_type_or_else (basetype1, NULL_TREE))
11806 return boolean_false_node;
11807
11808 tree membertype2 = TREE_TYPE (TREE_TYPE (arg2));
11809 tree basetype2 = TYPE_OFFSET_BASETYPE (TREE_TYPE (arg2));
11810 if (!complete_type_or_else (basetype2, NULL_TREE))
11811 return boolean_false_node;
11812
11813 if (!NON_UNION_CLASS_TYPE_P (basetype1)
11814 || !NON_UNION_CLASS_TYPE_P (basetype2)
11815 || !std_layout_type_p (basetype1)
11816 || !std_layout_type_p (basetype2))
11817 return boolean_false_node;
11818
11819 /* If the member types aren't layout compatible, then they
11820 can't be corresponding members. */
11821 if (!layout_compatible_type_p (membertype1, membertype2))
11822 return boolean_false_node;
11823
11824 if (TREE_CODE (arg1) == PTRMEM_CST)
11825 arg1 = cplus_expand_constant (arg1);
11826 if (TREE_CODE (arg2) == PTRMEM_CST)
11827 arg2 = cplus_expand_constant (arg2);
11828
11829 if (null_member_pointer_value_p (arg1)
11830 || null_member_pointer_value_p (arg2))
11831 return boolean_false_node;
11832
11833 if (TREE_CODE (arg1) == INTEGER_CST
11834 && TREE_CODE (arg2) == INTEGER_CST
11835 && !tree_int_cst_equal (arg1, arg2))
11836 return boolean_false_node;
11837
11838 if (TREE_CODE (arg2) == INTEGER_CST
11839 && TREE_CODE (arg1) != INTEGER_CST)
11840 {
11841 std::swap (arg1, arg2);
11842 std::swap (membertype1, membertype2);
11843 std::swap (basetype1, basetype2);
11844 }
11845
11846 tree ret = is_corresponding_member_aggr (loc, basetype1, membertype1, arg1,
11847 basetype2, membertype2, arg2);
11848 if (TREE_TYPE (ret) == boolean_type_node)
11849 return ret;
11850 /* If both arg1 and arg2 are INTEGER_CSTs, is_corresponding_member_aggr
11851 already returns boolean_{true,false}_node whether those particular
11852 members are corresponding members or not. Otherwise, if only
11853 one of them is INTEGER_CST (canonicalized to first being INTEGER_CST
11854 above), it returns boolean_false_node if it is certainly not a
11855 corresponding member and otherwise we need to do a runtime check that
11856 those two OFFSET_TYPE offsets are equal.
11857 If neither of the operands is INTEGER_CST, is_corresponding_member_aggr
11858 returns the largest offset at which the members would be corresponding
11859 members, so perform arg1 <= ret && arg1 == arg2 runtime check. */
11860 gcc_assert (TREE_CODE (arg2) != INTEGER_CST);
11861 if (TREE_CODE (arg1) == INTEGER_CST)
11862 return fold_build2 (EQ_EXPR, boolean_type_node, arg1,
11863 fold_convert (TREE_TYPE (arg1), arg2));
11864 ret = fold_build2 (LE_EXPR, boolean_type_node,
11865 fold_convert (pointer_sized_int_node, arg1),
11866 fold_convert (pointer_sized_int_node, ret));
11867 return fold_build2 (TRUTH_AND_EXPR, boolean_type_node, ret,
11868 fold_build2 (EQ_EXPR, boolean_type_node, arg1,
11869 fold_convert (TREE_TYPE (arg1), arg2)));
11870 }
11871
11872 /* Actually evaluates the trait. */
11873
11874 static bool
11875 trait_expr_value (cp_trait_kind kind, tree type1, tree type2)
11876 {
11877 enum tree_code type_code1;
11878 tree t;
11879
11880 type_code1 = TREE_CODE (type1);
11881
11882 switch (kind)
11883 {
11884 case CPTK_HAS_NOTHROW_ASSIGN:
11885 type1 = strip_array_types (type1);
11886 return (!CP_TYPE_CONST_P (type1) && type_code1 != REFERENCE_TYPE
11887 && (trait_expr_value (CPTK_HAS_TRIVIAL_ASSIGN, type1, type2)
11888 || (CLASS_TYPE_P (type1)
11889 && classtype_has_nothrow_assign_or_copy_p (type1,
11890 true))));
11891
11892 case CPTK_HAS_TRIVIAL_ASSIGN:
11893 /* ??? The standard seems to be missing the "or array of such a class
11894 type" wording for this trait. */
11895 type1 = strip_array_types (type1);
11896 return (!CP_TYPE_CONST_P (type1) && type_code1 != REFERENCE_TYPE
11897 && (trivial_type_p (type1)
11898 || (CLASS_TYPE_P (type1)
11899 && TYPE_HAS_TRIVIAL_COPY_ASSIGN (type1))));
11900
11901 case CPTK_HAS_NOTHROW_CONSTRUCTOR:
11902 type1 = strip_array_types (type1);
11903 return (trait_expr_value (CPTK_HAS_TRIVIAL_CONSTRUCTOR, type1, type2)
11904 || (CLASS_TYPE_P (type1)
11905 && (t = locate_ctor (type1))
11906 && maybe_instantiate_noexcept (t)
11907 && TYPE_NOTHROW_P (TREE_TYPE (t))));
11908
11909 case CPTK_HAS_TRIVIAL_CONSTRUCTOR:
11910 type1 = strip_array_types (type1);
11911 return (trivial_type_p (type1)
11912 || (CLASS_TYPE_P (type1) && TYPE_HAS_TRIVIAL_DFLT (type1)));
11913
11914 case CPTK_HAS_NOTHROW_COPY:
11915 type1 = strip_array_types (type1);
11916 return (trait_expr_value (CPTK_HAS_TRIVIAL_COPY, type1, type2)
11917 || (CLASS_TYPE_P (type1)
11918 && classtype_has_nothrow_assign_or_copy_p (type1, false)));
11919
11920 case CPTK_HAS_TRIVIAL_COPY:
11921 /* ??? The standard seems to be missing the "or array of such a class
11922 type" wording for this trait. */
11923 type1 = strip_array_types (type1);
11924 return (trivial_type_p (type1) || type_code1 == REFERENCE_TYPE
11925 || (CLASS_TYPE_P (type1) && TYPE_HAS_TRIVIAL_COPY_CTOR (type1)));
11926
11927 case CPTK_HAS_TRIVIAL_DESTRUCTOR:
11928 type1 = strip_array_types (type1);
11929 return (trivial_type_p (type1) || type_code1 == REFERENCE_TYPE
11930 || (CLASS_TYPE_P (type1)
11931 && TYPE_HAS_TRIVIAL_DESTRUCTOR (type1)));
11932
11933 case CPTK_HAS_VIRTUAL_DESTRUCTOR:
11934 return type_has_virtual_destructor (type1);
11935
11936 case CPTK_HAS_UNIQUE_OBJ_REPRESENTATIONS:
11937 return type_has_unique_obj_representations (type1);
11938
11939 case CPTK_IS_ABSTRACT:
11940 return ABSTRACT_CLASS_TYPE_P (type1);
11941
11942 case CPTK_IS_AGGREGATE:
11943 return CP_AGGREGATE_TYPE_P (type1);
11944
11945 case CPTK_IS_BASE_OF:
11946 return (NON_UNION_CLASS_TYPE_P (type1) && NON_UNION_CLASS_TYPE_P (type2)
11947 && (same_type_ignoring_top_level_qualifiers_p (type1, type2)
11948 || DERIVED_FROM_P (type1, type2)));
11949
11950 case CPTK_IS_CLASS:
11951 return NON_UNION_CLASS_TYPE_P (type1);
11952
11953 case CPTK_IS_EMPTY:
11954 return NON_UNION_CLASS_TYPE_P (type1) && CLASSTYPE_EMPTY_P (type1);
11955
11956 case CPTK_IS_ENUM:
11957 return type_code1 == ENUMERAL_TYPE;
11958
11959 case CPTK_IS_FINAL:
11960 return CLASS_TYPE_P (type1) && CLASSTYPE_FINAL (type1);
11961
11962 case CPTK_IS_LAYOUT_COMPATIBLE:
11963 return layout_compatible_type_p (type1, type2);
11964
11965 case CPTK_IS_LITERAL_TYPE:
11966 return literal_type_p (type1);
11967
11968 case CPTK_IS_POINTER_INTERCONVERTIBLE_BASE_OF:
11969 return pointer_interconvertible_base_of_p (type1, type2);
11970
11971 case CPTK_IS_POD:
11972 return pod_type_p (type1);
11973
11974 case CPTK_IS_POLYMORPHIC:
11975 return CLASS_TYPE_P (type1) && TYPE_POLYMORPHIC_P (type1);
11976
11977 case CPTK_IS_SAME_AS:
11978 return same_type_p (type1, type2);
11979
11980 case CPTK_IS_STD_LAYOUT:
11981 return std_layout_type_p (type1);
11982
11983 case CPTK_IS_TRIVIAL:
11984 return trivial_type_p (type1);
11985
11986 case CPTK_IS_TRIVIALLY_ASSIGNABLE:
11987 return is_trivially_xible (MODIFY_EXPR, type1, type2);
11988
11989 case CPTK_IS_TRIVIALLY_CONSTRUCTIBLE:
11990 return is_trivially_xible (INIT_EXPR, type1, type2);
11991
11992 case CPTK_IS_TRIVIALLY_COPYABLE:
11993 return trivially_copyable_p (type1);
11994
11995 case CPTK_IS_UNION:
11996 return type_code1 == UNION_TYPE;
11997
11998 case CPTK_IS_ASSIGNABLE:
11999 return is_xible (MODIFY_EXPR, type1, type2);
12000
12001 case CPTK_IS_CONSTRUCTIBLE:
12002 return is_xible (INIT_EXPR, type1, type2);
12003
12004 case CPTK_IS_NOTHROW_ASSIGNABLE:
12005 return is_nothrow_xible (MODIFY_EXPR, type1, type2);
12006
12007 case CPTK_IS_NOTHROW_CONSTRUCTIBLE:
12008 return is_nothrow_xible (INIT_EXPR, type1, type2);
12009
12010 default:
12011 gcc_unreachable ();
12012 return false;
12013 }
12014 }
12015
12016 /* If TYPE is an array of unknown bound, or (possibly cv-qualified)
12017 void, or a complete type, returns true, otherwise false. */
12018
12019 static bool
12020 check_trait_type (tree type)
12021 {
12022 if (type == NULL_TREE)
12023 return true;
12024
12025 if (TREE_CODE (type) == TREE_LIST)
12026 return (check_trait_type (TREE_VALUE (type))
12027 && check_trait_type (TREE_CHAIN (type)));
12028
12029 if (TREE_CODE (type) == ARRAY_TYPE && !TYPE_DOMAIN (type)
12030 && COMPLETE_TYPE_P (TREE_TYPE (type)))
12031 return true;
12032
12033 if (VOID_TYPE_P (type))
12034 return true;
12035
12036 return !!complete_type_or_else (strip_array_types (type), NULL_TREE);
12037 }
12038
12039 /* True iff the conversion (if any) would be a direct reference
12040 binding, not requiring complete types. This is LWG2939. */
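/* For example, for an incomplete class type I,

     __is_constructible (I&, I&)

   must not require I to be complete, because initializing I& from an
   lvalue of type I is a direct reference binding (LWG2939).  */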
12041
12042 static bool
12043 same_type_ref_bind_p (cp_trait_kind kind, tree type1, tree type2)
12044 {
12045 tree from, to;
12046 switch (kind)
12047 {
12048 /* These put the target type first. */
12049 case CPTK_IS_CONSTRUCTIBLE:
12050 case CPTK_IS_NOTHROW_CONSTRUCTIBLE:
12051 case CPTK_IS_TRIVIALLY_CONSTRUCTIBLE:
12052 to = type1;
12053 from = type2;
12054 break;
12055
12056 default:
12057 gcc_unreachable ();
12058 }
12059
12060 if (TREE_CODE (to) != REFERENCE_TYPE || !from)
12061 return false;
12062 if (TREE_CODE (from) == TREE_VEC && TREE_VEC_LENGTH (from) == 1)
12063 from = TREE_VEC_ELT (from, 0);
12064 else if (TREE_CODE (from) == TREE_LIST && !TREE_CHAIN (from))
12065 from = TREE_VALUE (from);
12066 return (TYPE_P (from)
12067 && (same_type_ignoring_top_level_qualifiers_p
12068 (non_reference (to), non_reference (from))));
12069 }
12070
12071 /* Process a trait expression. */
12072
12073 tree
12074 finish_trait_expr (location_t loc, cp_trait_kind kind, tree type1, tree type2)
12075 {
12076 if (type1 == error_mark_node
12077 || type2 == error_mark_node)
12078 return error_mark_node;
12079
12080 if (processing_template_decl)
12081 {
12082 tree trait_expr = make_node (TRAIT_EXPR);
12083 TREE_TYPE (trait_expr) = boolean_type_node;
12084 TRAIT_EXPR_TYPE1 (trait_expr) = type1;
12085 TRAIT_EXPR_TYPE2 (trait_expr) = type2;
12086 TRAIT_EXPR_KIND (trait_expr) = kind;
12087 TRAIT_EXPR_LOCATION (trait_expr) = loc;
12088 return trait_expr;
12089 }
12090
12091 switch (kind)
12092 {
12093 case CPTK_HAS_NOTHROW_ASSIGN:
12094 case CPTK_HAS_TRIVIAL_ASSIGN:
12095 case CPTK_HAS_NOTHROW_CONSTRUCTOR:
12096 case CPTK_HAS_TRIVIAL_CONSTRUCTOR:
12097 case CPTK_HAS_NOTHROW_COPY:
12098 case CPTK_HAS_TRIVIAL_COPY:
12099 case CPTK_HAS_TRIVIAL_DESTRUCTOR:
12100 case CPTK_HAS_UNIQUE_OBJ_REPRESENTATIONS:
12101 case CPTK_HAS_VIRTUAL_DESTRUCTOR:
12102 case CPTK_IS_ABSTRACT:
12103 case CPTK_IS_AGGREGATE:
12104 case CPTK_IS_EMPTY:
12105 case CPTK_IS_FINAL:
12106 case CPTK_IS_LITERAL_TYPE:
12107 case CPTK_IS_POD:
12108 case CPTK_IS_POLYMORPHIC:
12109 case CPTK_IS_STD_LAYOUT:
12110 case CPTK_IS_TRIVIAL:
12111 case CPTK_IS_TRIVIALLY_COPYABLE:
12112 if (!check_trait_type (type1))
12113 return error_mark_node;
12114 break;
12115
12116 case CPTK_IS_ASSIGNABLE:
12117 case CPTK_IS_CONSTRUCTIBLE:
12118 break;
12119
12120 case CPTK_IS_TRIVIALLY_CONSTRUCTIBLE:
12121 case CPTK_IS_NOTHROW_CONSTRUCTIBLE:
12122 /* Don't check completeness for direct reference binding. */;
12123 if (same_type_ref_bind_p (kind, type1, type2))
12124 break;
12125 gcc_fallthrough ();
12126
12127 case CPTK_IS_NOTHROW_ASSIGNABLE:
12128 case CPTK_IS_TRIVIALLY_ASSIGNABLE:
12129 if (!check_trait_type (type1)
12130 || !check_trait_type (type2))
12131 return error_mark_node;
12132 break;
12133
12134 case CPTK_IS_BASE_OF:
12135 case CPTK_IS_POINTER_INTERCONVERTIBLE_BASE_OF:
12136 if (NON_UNION_CLASS_TYPE_P (type1) && NON_UNION_CLASS_TYPE_P (type2)
12137 && !same_type_ignoring_top_level_qualifiers_p (type1, type2)
12138 && !complete_type_or_else (type2, NULL_TREE))
12139 /* We already issued an error. */
12140 return error_mark_node;
12141 break;
12142
12143 case CPTK_IS_CLASS:
12144 case CPTK_IS_ENUM:
12145 case CPTK_IS_UNION:
12146 case CPTK_IS_SAME_AS:
12147 break;
12148
12149 case CPTK_IS_LAYOUT_COMPATIBLE:
12150 if (!array_of_unknown_bound_p (type1)
12151 && TREE_CODE (type1) != VOID_TYPE
12152 && !complete_type_or_else (type1, NULL_TREE))
12153 /* We already issued an error. */
12154 return error_mark_node;
12155 if (!array_of_unknown_bound_p (type2)
12156 && TREE_CODE (type2) != VOID_TYPE
12157 && !complete_type_or_else (type2, NULL_TREE))
12158 /* We already issued an error. */
12159 return error_mark_node;
12160 break;
12161
12162 default:
12163 gcc_unreachable ();
12164 }
12165
12166 tree val = (trait_expr_value (kind, type1, type2)
12167 ? boolean_true_node : boolean_false_node);
12168 return maybe_wrap_with_location (val, loc);
12169 }
12170
12171 /* Do-nothing variants of functions to handle pragma FLOAT_CONST_DECIMAL64,
12172 which is ignored for C++. */
12173
12174 void
12175 set_float_const_decimal64 (void)
12176 {
12177 }
12178
12179 void
12180 clear_float_const_decimal64 (void)
12181 {
12182 }
12183
12184 bool
12185 float_const_decimal64_p (void)
12186 {
12187 return 0;
12188 }
12189
12190
12191 /* Return true if T designates the implied `this' parameter. */
12192
12193 bool
12194 is_this_parameter (tree t)
12195 {
12196 if (!DECL_P (t) || DECL_NAME (t) != this_identifier)
12197 return false;
12198 gcc_assert (TREE_CODE (t) == PARM_DECL
12199 || (TREE_CODE (t) == VAR_DECL && DECL_HAS_VALUE_EXPR_P (t))
12200 || (cp_binding_oracle && TREE_CODE (t) == VAR_DECL));
12201 return true;
12202 }
12203
12204 /* Insert the deduced return type for an auto function. */
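/* For example, for

     auto f () { return 42; }

   the deduced RETURN_TYPE is int, and both TREE_TYPE (FCO) and
   DECL_RESULT (FCO) are updated accordingly.  */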
12205
12206 void
12207 apply_deduced_return_type (tree fco, tree return_type)
12208 {
12209 tree result;
12210
12211 if (return_type == error_mark_node)
12212 return;
12213
12214 if (DECL_CONV_FN_P (fco))
12215 DECL_NAME (fco) = make_conv_op_name (return_type);
12216
12217 TREE_TYPE (fco) = change_return_type (return_type, TREE_TYPE (fco));
12218
12219 result = DECL_RESULT (fco);
12220 if (result == NULL_TREE)
12221 return;
12222 if (TREE_TYPE (result) == return_type)
12223 return;
12224
12225 if (!processing_template_decl && !VOID_TYPE_P (return_type)
12226 && !complete_type_or_else (return_type, NULL_TREE))
12227 return;
12228
12229 /* We already have a DECL_RESULT from start_preparsed_function.
12230 Now we need to redo the work it and allocate_struct_function
12231 did to reflect the new type. */
12232 gcc_assert (current_function_decl == fco);
12233 result = build_decl (input_location, RESULT_DECL, NULL_TREE,
12234 TYPE_MAIN_VARIANT (return_type));
12235 DECL_ARTIFICIAL (result) = 1;
12236 DECL_IGNORED_P (result) = 1;
12237 cp_apply_type_quals_to_decl (cp_type_quals (return_type),
12238 result);
12239
12240 DECL_RESULT (fco) = result;
12241
12242 if (!processing_template_decl)
12243 {
12244 bool aggr = aggregate_value_p (result, fco);
12245 #ifdef PCC_STATIC_STRUCT_RETURN
12246 cfun->returns_pcc_struct = aggr;
12247 #endif
12248 cfun->returns_struct = aggr;
12249 }
12250 }
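
/* For illustration, a minimal sketch of the deduction this supports:

     auto f () { return 42; }   // return type deduced as int

   Once the deduced type is known, the function type and the DECL_RESULT set
   up by start_preparsed_function are rebuilt to match it.  */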

/* DECL is a local variable or parameter from the surrounding scope of a
   lambda-expression.  Returns the decltype for a use of the capture field
   for DECL even if it hasn't been captured yet.  */

static tree
capture_decltype (tree decl)
{
  tree lam = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (current_function_decl));
  tree cap = lookup_name (DECL_NAME (decl), LOOK_where::BLOCK,
			  LOOK_want::HIDDEN_LAMBDA);
  tree type;

  if (cap && is_capture_proxy (cap))
    type = TREE_TYPE (cap);
  else
    switch (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam))
      {
      case CPLD_NONE:
	error ("%qD is not captured", decl);
	return error_mark_node;

      case CPLD_COPY:
	type = TREE_TYPE (decl);
	if (TYPE_REF_P (type)
	    && TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE)
	  type = TREE_TYPE (type);
	break;

      case CPLD_REFERENCE:
	type = TREE_TYPE (decl);
	if (!TYPE_REF_P (type))
	  type = build_reference_type (TREE_TYPE (decl));
	break;

      default:
	gcc_unreachable ();
      }

  if (!TYPE_REF_P (type))
    {
      if (!LAMBDA_EXPR_MUTABLE_P (lam))
	type = cp_build_qualified_type (type, (cp_type_quals (type)
					       | TYPE_QUAL_CONST));
      type = build_reference_type (type);
    }
  return type;
}
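
/* For illustration, a minimal sketch (the local names are hypothetical):

     void g ()
     {
       int i = 0;
       auto lam = [=] { decltype ((i)) r = i; };   // r has type int const&
     }

   Even before `i' is actually captured, decltype on a parenthesized use of
   it must reflect the default capture mode: capture by copy in a
   non-mutable lambda yields a const-qualified reference type.  */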

/* Build a unary fold expression of EXPR over OP.  DIR is
   UNARY_RIGHT_FOLD_EXPR for a right unary fold and UNARY_LEFT_FOLD_EXPR
   for a left unary fold.  */

static tree
finish_unary_fold_expr (tree expr, int op, tree_code dir)
{
  /* Build a pack expansion (assuming expr has pack type).  */
  if (!uses_parameter_packs (expr))
    {
      error_at (location_of (expr), "operand of fold expression has no "
		"unexpanded parameter packs");
      return error_mark_node;
    }
  tree pack = make_pack_expansion (expr);

  /* Build the fold expression.  */
  tree code = build_int_cstu (integer_type_node, abs (op));
  tree fold = build_min_nt_loc (input_location, dir, code, pack);
  FOLD_EXPR_MODIFY_P (fold) = (op < 0);
  TREE_TYPE (fold) = build_dependent_operator_type (NULL_TREE,
						    FOLD_EXPR_OP (fold),
						    FOLD_EXPR_MODIFY_P (fold));
  return fold;
}

tree
finish_left_unary_fold_expr (tree expr, int op)
{
  return finish_unary_fold_expr (expr, op, UNARY_LEFT_FOLD_EXPR);
}

tree
finish_right_unary_fold_expr (tree expr, int op)
{
  return finish_unary_fold_expr (expr, op, UNARY_RIGHT_FOLD_EXPR);
}
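
/* For illustration, a sketch of the source forms these build:

     template <typename... Ts>
     auto sum_right (Ts... ts) { return (ts + ...); }   // unary right fold

     template <typename... Ts>
     auto sum_left (Ts... ts) { return (... + ts); }    // unary left fold

   A fold whose operand names no parameter pack is rejected above.  */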

/* Build a binary fold expression: PACK is the operand containing an
   unexpanded parameter pack, INIT the initial (non-pack) operand, OP the
   operator, and DIR either BINARY_RIGHT_FOLD_EXPR or
   BINARY_LEFT_FOLD_EXPR.  */

tree
finish_binary_fold_expr (tree pack, tree init, int op, tree_code dir)
{
  pack = make_pack_expansion (pack);
  tree code = build_int_cstu (integer_type_node, abs (op));
  tree fold = build_min_nt_loc (input_location, dir, code, pack, init);
  FOLD_EXPR_MODIFY_P (fold) = (op < 0);
  TREE_TYPE (fold) = build_dependent_operator_type (NULL_TREE,
						    FOLD_EXPR_OP (fold),
						    FOLD_EXPR_MODIFY_P (fold));
  return fold;
}

/* Build a binary fold expression over EXPR1 and EXPR2.  The
   associativity of the fold is determined by EXPR1 and EXPR2 (whichever
   has an unexpanded parameter pack).  */

tree
finish_binary_fold_expr (tree expr1, tree expr2, int op)
{
  /* Determine which expr has an unexpanded parameter pack and
     set the pack and initial term.  */
  bool pack1 = uses_parameter_packs (expr1);
  bool pack2 = uses_parameter_packs (expr2);
  if (pack1 && !pack2)
    return finish_binary_fold_expr (expr1, expr2, op, BINARY_RIGHT_FOLD_EXPR);
  else if (pack2 && !pack1)
    return finish_binary_fold_expr (expr2, expr1, op, BINARY_LEFT_FOLD_EXPR);
  else
    {
      if (pack1)
	error ("both arguments in binary fold have unexpanded parameter packs");
      else
	error ("no unexpanded parameter packs in binary fold");
    }
  return error_mark_node;
}
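
/* For illustration, a sketch of the corresponding binary fold forms:

     template <typename... Ts>
     auto sum1 (Ts... ts) { return (ts + ... + 0); }   // binary right fold

     template <typename... Ts>
     auto sum2 (Ts... ts) { return (0 + ... + ts); }   // binary left fold

   The operand containing the unexpanded pack determines the associativity;
   if both or neither operand contains one, an error is emitted above.  */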

/* Finish __builtin_launder (arg).  */

tree
finish_builtin_launder (location_t loc, tree arg, tsubst_flags_t complain)
{
  tree orig_arg = arg;
  if (!type_dependent_expression_p (arg))
    arg = decay_conversion (arg, complain);
  if (error_operand_p (arg))
    return error_mark_node;
  if (!type_dependent_expression_p (arg)
      && !TYPE_PTR_P (TREE_TYPE (arg)))
    {
      error_at (loc, "non-pointer argument to %<__builtin_launder%>");
      return error_mark_node;
    }
  if (processing_template_decl)
    arg = orig_arg;
  return build_call_expr_internal_loc (loc, IFN_LAUNDER,
				       TREE_TYPE (arg), 1, arg);
}
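
/* For illustration, a sketch of the intended use (the built-in is what
   libstdc++'s std::launder wraps):

     #include <new>

     struct X { const int n; };

     int
     use (X *p)
     {
       new (p) X{2};                       // placement-new over *p
       return __builtin_launder (p)->n;    // OK: points at the new object
     }

   Only pointer arguments are accepted; anything else is diagnosed above.  */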

/* Finish __builtin_convertvector (arg, type).  */

tree
cp_build_vec_convert (tree arg, location_t loc, tree type,
		      tsubst_flags_t complain)
{
  if (error_operand_p (type))
    return error_mark_node;
  if (error_operand_p (arg))
    return error_mark_node;

  tree ret = NULL_TREE;
  if (!type_dependent_expression_p (arg) && !dependent_type_p (type))
    ret = c_build_vec_convert (cp_expr_loc_or_input_loc (arg),
			       decay_conversion (arg, complain),
			       loc, type, (complain & tf_error) != 0);

  if (!processing_template_decl)
    return ret;

  return build_call_expr_internal_loc (loc, IFN_VEC_CONVERT, type, 1, arg);
}
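
/* For illustration, a sketch of a valid call (element counts must match):

     typedef int v4si __attribute__ ((vector_size (16)));
     typedef float v4sf __attribute__ ((vector_size (16)));

     v4sf
     to_float (v4si x)
     {
       return __builtin_convertvector (x, v4sf);
     }

   The detailed checking of the operand and target vector types is done by
   c_build_vec_convert in the C-family code.  */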

/* Finish __builtin_bit_cast (type, arg).  */

tree
cp_build_bit_cast (location_t loc, tree type, tree arg,
		   tsubst_flags_t complain)
{
  if (error_operand_p (type))
    return error_mark_node;
  if (!dependent_type_p (type))
    {
      if (!complete_type_or_maybe_complain (type, NULL_TREE, complain))
	return error_mark_node;
      if (TREE_CODE (type) == ARRAY_TYPE)
	{
	  /* std::bit_cast for destination ARRAY_TYPE is not possible,
	     as functions may not return an array, so don't bother trying
	     to support this (and then deal with VLAs etc.).  */
	  error_at (loc, "%<__builtin_bit_cast%> destination type %qT "
		    "is an array type", type);
	  return error_mark_node;
	}
      if (!trivially_copyable_p (type))
	{
	  error_at (loc, "%<__builtin_bit_cast%> destination type %qT "
		    "is not trivially copyable", type);
	  return error_mark_node;
	}
    }

  if (error_operand_p (arg))
    return error_mark_node;

  if (!type_dependent_expression_p (arg))
    {
      if (TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE)
	{
	  /* Don't perform array-to-pointer conversion.  */
	  arg = mark_rvalue_use (arg, loc, true);
	  if (!complete_type_or_maybe_complain (TREE_TYPE (arg), arg, complain))
	    return error_mark_node;
	}
      else
	arg = decay_conversion (arg, complain);

      if (error_operand_p (arg))
	return error_mark_node;

      if (!trivially_copyable_p (TREE_TYPE (arg)))
	{
	  error_at (cp_expr_loc_or_loc (arg, loc),
		    "%<__builtin_bit_cast%> source type %qT "
		    "is not trivially copyable", TREE_TYPE (arg));
	  return error_mark_node;
	}
      if (!dependent_type_p (type)
	  && !cp_tree_equal (TYPE_SIZE_UNIT (type),
			     TYPE_SIZE_UNIT (TREE_TYPE (arg))))
	{
	  error_at (loc, "%<__builtin_bit_cast%> source size %qE "
		    "not equal to destination type size %qE",
		    TYPE_SIZE_UNIT (TREE_TYPE (arg)),
		    TYPE_SIZE_UNIT (type));
	  return error_mark_node;
	}
    }

  tree ret = build_min (BIT_CAST_EXPR, type, arg);
  SET_EXPR_LOCATION (ret, loc);

  if (!processing_template_decl && CLASS_TYPE_P (type))
    ret = get_target_expr_sfinae (ret, complain);

  return ret;
}
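
/* For illustration, a sketch of a use (this is the built-in that libstdc++'s
   std::bit_cast expands to), assuming a target where unsigned and float have
   the same size:

     unsigned
     bits_of (float f)
     {
       return __builtin_bit_cast (unsigned, f);
     }

   Both types must be trivially copyable and of equal size, and the
   destination may not be an array type, as checked above.  */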

#include "gt-cp-semantics.h"