/* C++-specific tree lowering bits; see also c-gimplify.cc and gimple.cc.

   Copyright (C) 2002-2022 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "basic-block.h"
#include "cp-tree.h"
#include "gimple.h"
#include "predict.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "gimplify.h"
#include "c-family/c-ubsan.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "gcc-rich-location.h"
#include "memmodel.h"
#include "tm_p.h"
#include "output.h"
#include "file-prefix-map.h"
#include "cgraph.h"
#include "omp-general.h"
#include "opts.h"

struct cp_fold_data
{
  hash_set<tree> pset;
  bool genericize; // called from cp_fold_function?

  cp_fold_data (bool g): genericize (g) {}
};

/* Forward declarations.  */

static tree cp_genericize_r (tree *, int *, void *);
static tree cp_fold_r (tree *, int *, void *);
static void cp_genericize_tree (tree*, bool);
static tree cp_fold (tree);

/* Genericize a TRY_BLOCK.  */

static void
genericize_try_block (tree *stmt_p)
{
  tree body = TRY_STMTS (*stmt_p);
  tree cleanup = TRY_HANDLERS (*stmt_p);

  *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
}
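
/* For illustration: given

     try { f (); } catch (...) { g (); }

   the front end's TRY_BLOCK carries the try body in TRY_STMTS and the
   HANDLER chain in TRY_HANDLERS; the lowering above simply repackages the
   two operands as a GENERIC TRY_CATCH_EXPR, which the gimplifier already
   knows how to handle.  */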

/* Genericize a HANDLER by converting to a CATCH_EXPR.  */

static void
genericize_catch_block (tree *stmt_p)
{
  tree type = HANDLER_TYPE (*stmt_p);
  tree body = HANDLER_BODY (*stmt_p);

  /* FIXME should the caught type go in TREE_TYPE?  */
  *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
}

/* A terser interface for building a representation of an exception
   specification.  */

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}
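
/* For illustration, a rough sketch of the tree built above: for a dynamic
   exception specification such as 'void f () throw (A, B)', the result is
   approximately

     TRY_CATCH_EXPR <BODY,
                     EH_FILTER_EXPR <(A, B), FAILURE>>

   where FAILURE is the statement list to run when an exception that does
   not match the allowed list escapes BODY.  */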

/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_fn, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  suppress_warning (*stmt_p);
  suppress_warning (TREE_OPERAND (*stmt_p, 1));
}

/* Return the first non-compound statement in STMT.  */

tree
first_stmt (tree stmt)
{
  switch (TREE_CODE (stmt))
    {
    case STATEMENT_LIST:
      if (tree_statement_list_node *p = STATEMENT_LIST_HEAD (stmt))
        return first_stmt (p->stmt);
      return void_node;

    case BIND_EXPR:
      return first_stmt (BIND_EXPR_BODY (stmt));

    default:
      return stmt;
    }
}

/* Genericize an IF_STMT by turning it into a COND_EXPR.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (then_ && else_)
    {
      tree ft = first_stmt (then_);
      tree fe = first_stmt (else_);
      br_predictor pr;
      if (TREE_CODE (ft) == PREDICT_EXPR
          && TREE_CODE (fe) == PREDICT_EXPR
          && (pr = PREDICT_EXPR_PREDICTOR (ft)) == PREDICT_EXPR_PREDICTOR (fe)
          && (pr == PRED_HOT_LABEL || pr == PRED_COLD_LABEL))
        {
          gcc_rich_location richloc (EXPR_LOC_OR_LOC (ft, locus));
          richloc.add_range (EXPR_LOC_OR_LOC (fe, locus));
          warning_at (&richloc, OPT_Wattributes,
                      "both branches of %<if%> statement marked as %qs",
                      pr == PRED_HOT_LABEL ? "likely" : "unlikely");
        }
    }

  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  /* consteval if has been verified not to have the then_/else_ blocks
     entered by gotos/case labels from elsewhere, and as then_ block
     can contain unfolded immediate function calls, we have to discard
     the then_ block regardless of whether else_ has side-effects or not.  */
  if (IF_STMT_CONSTEVAL_P (stmt))
    {
      if (block_may_fallthru (then_))
        stmt = build3 (COND_EXPR, void_type_node, boolean_false_node,
                       void_node, else_);
      else
        stmt = else_;
    }
  else if (IF_STMT_CONSTEXPR_P (stmt))
    stmt = integer_nonzerop (cond) ? then_ : else_;
  /* ??? This optimization doesn't seem to belong here, but removing it
     causes -Wreturn-type regressions (e.g. 107310).  */
  else if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  protected_set_expr_location_if_unset (stmt, locus);
  *stmt_p = stmt;
}
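
/* For illustration: after the lowering above,

     if constexpr (true) f (); else g ();

   reduces to just 'f ();', a plain 'if (x) f (); else g ();' becomes
   COND_EXPR <x, f ();, g ();>, and a consteval if discards its then_ arm
   entirely, since that arm may still contain unfolded calls to immediate
   functions.  */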

/* Hook into the middle of gimplifying an OMP_FOR node.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  gimplify_and_add (for_stmt, &seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}

/* Gimplify an EXPR_STMT node.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
        {
          if (!IS_EMPTY_STMT (stmt)
              && !VOID_TYPE_P (TREE_TYPE (stmt))
              && !warning_suppressed_p (stmt, OPT_Wunused_value))
            warning (OPT_Wunused_value, "statement with no effect");
        }
      else
        warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}

/* Gimplify initialization from an AGGR_INIT_EXPR.  */

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  if (TREE_CODE (from) == TARGET_EXPR)
    if (tree init = TARGET_EXPR_INITIAL (from))
      {
        if (target_expr_needs_replace (from))
          {
            /* If this was changed by cp_genericize_target_expr, we need to
               walk into it to replace uses of the slot.  */
            replace_decl (&init, TARGET_EXPR_SLOT (from), to);
            *expr_p = init;
            return;
          }
        else
          from = init;
      }

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
         replace the slot operand with our target.

         Should we add a target parm to gimplify_expr instead?  No, as in this
         case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
          || TREE_CODE (sub) == VEC_INIT_EXPR)
        {
          if (TREE_CODE (sub) == AGGR_INIT_EXPR)
            AGGR_INIT_EXPR_SLOT (sub) = to;
          else
            VEC_INIT_EXPR_SLOT (sub) = to;
          *expr_p = from;

          /* The initialization is now a side-effect, so the container can
             become void.  */
          if (from != sub)
            TREE_TYPE (from) = void_type_node;
        }

      /* Handle aggregate NSDMI.  */
      replace_placeholders (sub, to);

      if (t == sub)
        break;
      else
        t = TREE_OPERAND (t, 1);
    }
}

/* Gimplify a MUST_NOT_THROW_EXPR.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple *mnt;

  gimplify_and_add (body, &try_);
  mnt = gimple_build_eh_must_not_throw (terminate_fn);
  gimple_seq_add_stmt_without_update (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimple_seq_add_stmt_without_update (pre_p, mnt);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}
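
/* For illustration, roughly what the lowering above produces: the body of

     void f () noexcept { g (); }

   is wrapped in a MUST_NOT_THROW_EXPR, which becomes a GIMPLE_TRY whose
   handler is a GIMPLE_EH_MUST_NOT_THROW calling terminate_fn, i.e. the
   moral equivalent of 'try { g (); } catch (...) { std::terminate (); }'.  */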

/* Return TRUE if an operand (OP) of a given TYPE being copied is
   really just an empty class copy.

   Check that the operand has a simple form so that TARGET_EXPRs and
   non-empty CONSTRUCTORs get reduced properly, and we leave the
   return slot optimization alone because it isn't a copy.  */

bool
simple_empty_class_p (tree type, tree op, tree_code code)
{
  if (TREE_CODE (op) == COMPOUND_EXPR)
    return simple_empty_class_p (type, TREE_OPERAND (op, 1), code);
  if (SIMPLE_TARGET_EXPR_P (op)
      && TYPE_HAS_TRIVIAL_DESTRUCTOR (type))
    /* The TARGET_EXPR is itself a simple copy, look through it.  */
    return simple_empty_class_p (type, TARGET_EXPR_INITIAL (op), code);

  if (TREE_CODE (op) == PARM_DECL
      && TREE_ADDRESSABLE (TREE_TYPE (op)))
    {
      tree fn = DECL_CONTEXT (op);
      if (DECL_THUNK_P (fn)
          || lambda_static_thunk_p (fn))
        /* In a thunk, we pass through invisible reference parms, so this isn't
           actually a copy.  */
        return false;
    }

  return
    (TREE_CODE (op) == EMPTY_CLASS_EXPR
     || code == MODIFY_EXPR
     || is_gimple_lvalue (op)
     || INDIRECT_REF_P (op)
     || (TREE_CODE (op) == CONSTRUCTOR
         && CONSTRUCTOR_NELTS (op) == 0)
     || (TREE_CODE (op) == CALL_EXPR
         && !CALL_EXPR_RETURN_SLOT_OPT (op)))
    && !TREE_CLOBBER_P (op)
    && is_really_empty_class (type, /*ignore_vptr*/true);
}
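
/* For illustration:

     struct empty {};
     empty a, b;
     a = b;     // copies no bits

   Here simple_empty_class_p is true for the RHS, so the caller can reduce
   the assignment to mere evaluation of its operands; a CONSTRUCTOR with
   CONSTRUCTOR_NELTS == 0 initializing such a type is recognized the same
   way.  */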

/* Returns true if evaluating E as an lvalue has side-effects;
   specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really
   have side-effects until there is a read or write through it.  */

static bool
lvalue_has_side_effects (tree e)
{
  if (!TREE_SIDE_EFFECTS (e))
    return false;
  while (handled_component_p (e))
    {
      if (TREE_CODE (e) == ARRAY_REF
          && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
        return true;
      e = TREE_OPERAND (e, 0);
    }
  if (DECL_P (e))
    /* Just naming a variable has no side-effects.  */
    return false;
  else if (INDIRECT_REF_P (e))
    /* Similarly, indirection has no side-effects.  */
    return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
  else
    /* For anything else, trust TREE_SIDE_EFFECTS.  */
    return TREE_SIDE_EFFECTS (e);
}
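
/* For illustration: for 'volatile int v;' the lvalue 'v' alone carries
   TREE_SIDE_EFFECTS but is not considered to have side-effects here, since
   no read or write happens yet; 'a[i++]', by contrast, does, because
   evaluating the ARRAY_REF index modifies 'i'.  */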

/* Gimplify *EXPR_P as rvalue into an expression that can't be modified
   by expressions with side-effects in other operands.  */

static enum gimplify_status
gimplify_to_rvalue (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
                    bool (*gimple_test_f) (tree))
{
  enum gimplify_status t
    = gimplify_expr (expr_p, pre_p, post_p, gimple_test_f, fb_rvalue);
  if (t == GS_ERROR)
    return GS_ERROR;
  else if (is_gimple_variable (*expr_p) && TREE_CODE (*expr_p) != SSA_NAME)
    *expr_p = get_initialized_tmp_var (*expr_p, pre_p);
  return t;
}

/* Like gimplify_arg, but if ORDERED is set (which should be set if
   any of the arguments this argument is sequenced before has
   TREE_SIDE_EFFECTS set), make sure expressions with is_gimple_reg_type type
   are gimplified into SSA_NAME or a fresh temporary and for
   non-is_gimple_reg_type we don't optimize away TARGET_EXPRs.  */

static enum gimplify_status
cp_gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
                 bool ordered)
{
  enum gimplify_status t;
  if (ordered
      && !is_gimple_reg_type (TREE_TYPE (*arg_p))
      && TREE_CODE (*arg_p) == TARGET_EXPR)
    {
      /* gimplify_arg would strip away the TARGET_EXPR, but
         that can mean we don't copy the argument and some following
         argument with side-effect could modify it.  */
      protected_set_expr_location (*arg_p, call_location);
      return gimplify_expr (arg_p, pre_p, NULL, is_gimple_lvalue, fb_either);
    }
  else
    {
      t = gimplify_arg (arg_p, pre_p, call_location);
      if (t == GS_ERROR)
        return GS_ERROR;
      else if (ordered
               && is_gimple_reg_type (TREE_TYPE (*arg_p))
               && is_gimple_variable (*arg_p)
               && TREE_CODE (*arg_p) != SSA_NAME
               /* No need to force references into register, references
                  can't be modified.  */
               && !TYPE_REF_P (TREE_TYPE (*arg_p))
               /* And this can't be modified either.  */
               && *arg_p != current_class_ptr)
        *arg_p = get_initialized_tmp_var (*arg_p, pre_p);
      return t;
    }
}
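
/* For illustration: when the argument order of a call is fixed
   (CALL_EXPR_ORDERED_ARGS, e.g. a call written with operator syntax whose
   operands are sequenced under P0145), something like

     x << g (x)   // operator<< (x, g (x)), where g modifies x

   must see the value of its first argument before g runs, so the
   gimplified first argument is forced into an SSA_NAME or fresh temporary
   that a later argument's side-effects cannot change.  */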

/* Do C++-specific gimplification.  Args are as for gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  location_t loc = cp_expr_loc_or_input_loc (*expr_p);
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
        = STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
        *expr_p = expand_vec_init_expr (NULL_TREE, *expr_p,
                                        tf_warning_or_error);

        cp_fold_data data (/*genericize*/true);
        cp_walk_tree (expr_p, cp_fold_r, &data, NULL);
        cp_genericize_tree (expr_p, false);
        copy_if_shared (expr_p);
        ret = GS_OK;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
         THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

    /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
       LHS of an assignment might also be involved in the RHS, as in bug
       25979.  */
    case INIT_EXPR:
      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
        return GS_OK;
      /* Fall through.  */
    case MODIFY_EXPR:
    modify_expr_case:
      {
        /* If the back end isn't clever enough to know that the lhs and rhs
           types are the same, add an explicit conversion.  */
        tree op0 = TREE_OPERAND (*expr_p, 0);
        tree op1 = TREE_OPERAND (*expr_p, 1);

        if (!error_operand_p (op0)
            && !error_operand_p (op1)
            && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
                || TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
            && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
          TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
                                              TREE_TYPE (op0), op1);

        else if (simple_empty_class_p (TREE_TYPE (op0), op1, code))
          {
            while (TREE_CODE (op1) == TARGET_EXPR)
              /* We're disconnecting the initializer from its target,
                 don't create a temporary.  */
              op1 = TARGET_EXPR_INITIAL (op1);

            /* Remove any copies of empty classes.  Also drop volatile
               variables on the RHS to avoid infinite recursion from
               gimplify_expr trying to load the value.  */
            if (TREE_SIDE_EFFECTS (op1))
              {
                if (TREE_THIS_VOLATILE (op1)
                    && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
                  op1 = build_fold_addr_expr (op1);

                gimplify_and_add (op1, pre_p);
              }
            gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
                           is_gimple_lvalue, fb_lvalue);
            *expr_p = TREE_OPERAND (*expr_p, 0);
            if (code == RETURN_EXPR && REFERENCE_CLASS_P (*expr_p))
              /* Avoid 'return *<retval>;'  */
              *expr_p = TREE_OPERAND (*expr_p, 0);
          }
        /* P0145 says that the RHS is sequenced before the LHS.
           gimplify_modify_expr gimplifies the RHS before the LHS, but that
           isn't quite strong enough in two cases:

           1) gimplify.cc wants to leave a CALL_EXPR on the RHS, which would
           mean it's evaluated after the LHS.

           2) the value calculation of the RHS is also sequenced before the
           LHS, so for scalar assignment we need to preevaluate if the
           RHS could be affected by LHS side-effects even if it has no
           side-effects of its own.  We don't need this for classes because
           class assignment takes its RHS by reference.  */
        else if (flag_strong_eval_order > 1
                 && TREE_CODE (*expr_p) == MODIFY_EXPR
                 && lvalue_has_side_effects (op0)
                 && (TREE_CODE (op1) == CALL_EXPR
                     || (SCALAR_TYPE_P (TREE_TYPE (op1))
                         && !TREE_CONSTANT (op1))))
          TREE_OPERAND (*expr_p, 1) = get_initialized_tmp_var (op1, pre_p);
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
      gcc_unreachable ();

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_LOOP:
    case OMP_TASKLOOP:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
        tree arg = TREE_OPERAND (*expr_p, 0);
        tree type = TREE_TYPE (*expr_p);
        *expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
                                            : arg;
        ret = GS_OK;
      }
      break;

    case CALL_EXPR:
      ret = GS_OK;
      if (flag_strong_eval_order == 2
          && CALL_EXPR_FN (*expr_p)
          && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p)
          && cp_get_callee_fndecl_nofold (*expr_p) == NULL_TREE)
        {
          tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
          enum gimplify_status t
            = gimplify_to_rvalue (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
                                  is_gimple_call_addr);
          if (t == GS_ERROR)
            ret = GS_ERROR;
          /* GIMPLE considers most pointer conversion useless, but for
             calls we actually care about the exact function pointer type.  */
          else if (TREE_TYPE (CALL_EXPR_FN (*expr_p)) != fnptrtype)
            CALL_EXPR_FN (*expr_p)
              = build1 (NOP_EXPR, fnptrtype, CALL_EXPR_FN (*expr_p));
        }
      if (!CALL_EXPR_FN (*expr_p))
        /* Internal function call.  */;
      else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
        {
          /* This is a call to a (compound) assignment operator that used
             the operator syntax; gimplify the RHS first.  */
          gcc_assert (call_expr_nargs (*expr_p) == 2);
          gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
          enum gimplify_status t
            = cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc,
                               TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, 0)));
          if (t == GS_ERROR)
            ret = GS_ERROR;
        }
      else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
        {
          /* Leave the last argument for gimplify_call_expr, to avoid problems
             with __builtin_va_arg_pack().  */
          int nargs = call_expr_nargs (*expr_p) - 1;
          int last_side_effects_arg = -1;
          for (int i = nargs; i > 0; --i)
            if (TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, i)))
              {
                last_side_effects_arg = i;
                break;
              }
          for (int i = 0; i < nargs; ++i)
            {
              enum gimplify_status t
                = cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc,
                                   i < last_side_effects_arg);
              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
      else if (flag_strong_eval_order
               && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
        {
          /* If flag_strong_eval_order, evaluate the object argument first.  */
          tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
          if (INDIRECT_TYPE_P (fntype))
            fntype = TREE_TYPE (fntype);
          if (TREE_CODE (fntype) == METHOD_TYPE)
            {
              int nargs = call_expr_nargs (*expr_p);
              bool side_effects = false;
              for (int i = 1; i < nargs; ++i)
                if (TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, i)))
                  {
                    side_effects = true;
                    break;
                  }
              enum gimplify_status t
                = cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc,
                                   side_effects);
              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
      if (ret != GS_ERROR)
        {
          tree decl = cp_get_callee_fndecl_nofold (*expr_p);
          if (decl && fndecl_built_in_p (decl, BUILT_IN_FRONTEND))
            switch (DECL_FE_FUNCTION_CODE (decl))
              {
              case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
                *expr_p = boolean_false_node;
                break;
              case CP_BUILT_IN_SOURCE_LOCATION:
                *expr_p
                  = fold_builtin_source_location (EXPR_LOCATION (*expr_p));
                break;
              case CP_BUILT_IN_IS_CORRESPONDING_MEMBER:
                *expr_p
                  = fold_builtin_is_corresponding_member
                        (EXPR_LOCATION (*expr_p), call_expr_nargs (*expr_p),
                         &CALL_EXPR_ARG (*expr_p, 0));
                break;
              case CP_BUILT_IN_IS_POINTER_INTERCONVERTIBLE_WITH_CLASS:
                *expr_p
                  = fold_builtin_is_pointer_inverconvertible_with_class
                        (EXPR_LOCATION (*expr_p), call_expr_nargs (*expr_p),
                         &CALL_EXPR_ARG (*expr_p, 0));
                break;
              default:
                break;
              }
        }
      break;

    case TARGET_EXPR:
      /* A TARGET_EXPR that expresses direct-initialization should have been
         elided by cp_gimplify_init_expr.  */
      gcc_checking_assert (!TARGET_EXPR_DIRECT_INIT_P (*expr_p));
      ret = GS_UNHANDLED;
      break;

    case PTRMEM_CST:
      *expr_p = cplus_expand_constant (*expr_p);
      if (TREE_CODE (*expr_p) == PTRMEM_CST)
        ret = GS_ERROR;
      else
        ret = GS_OK;
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (*expr_p, 0)
          && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
              || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
        {
          expr_p = &TREE_OPERAND (*expr_p, 0);
          /* Avoid going through the INIT_EXPR case, which can
             degrade INIT_EXPRs into AGGR_INIT_EXPRs.  */
          goto modify_expr_case;
        }
      /* Fall through.  */

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}

static inline bool
is_invisiref_parm (const_tree t)
{
  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
          && DECL_BY_REFERENCE (t));
}
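
/* For illustration: a parameter of class type with a non-trivial copy
   constructor or destructor is passed by invisible reference, so for

     struct S { S (const S &); };
     void f (S s) { use (s); }

   the PARM_DECL for 's' has DECL_BY_REFERENCE set, and genericization
   rewrites uses of 's' into dereferences of the incoming reference.  */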

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* OpenMP context during genericization.  */

struct cp_genericize_omp_taskreg
{
  bool is_parallel;
  bool default_shared;
  struct cp_genericize_omp_taskreg *outer;
  splay_tree variables;
};

/* Return true if genericization should try to determine if
   DECL is firstprivate or shared within task regions.  */

static bool
omp_var_to_track (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (is_invisiref_parm (decl))
    type = TREE_TYPE (type);
  else if (TYPE_REF_P (type))
    type = TREE_TYPE (type);
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);
  if (type == error_mark_node || !CLASS_TYPE_P (type))
    return false;
  if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
    return false;
  if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return false;
  return true;
}

/* Note DECL use in OpenMP region OMP_CTX during genericization.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
                                         (splay_tree_key) decl);
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      if (omp_ctx->outer)
        omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
        {
          struct cp_genericize_omp_taskreg *octx;

          for (octx = omp_ctx->outer; octx; octx = octx->outer)
            {
              n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
              if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
                {
                  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
                  break;
                }
              if (octx->is_parallel)
                break;
            }
          if (octx == NULL
              && (TREE_CODE (decl) == PARM_DECL
                  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
                      && DECL_CONTEXT (decl) == current_function_decl)))
            flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
          if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
            {
              /* DECL is implicitly determined firstprivate in
                 the current task construct.  Ensure copy ctor and
                 dtor are instantiated, because during gimplification
                 it will already be too late.  */
              tree type = TREE_TYPE (decl);
              if (is_invisiref_parm (decl))
                type = TREE_TYPE (type);
              else if (TYPE_REF_P (type))
                type = TREE_TYPE (type);
              while (TREE_CODE (type) == ARRAY_TYPE)
                type = TREE_TYPE (type);
              get_copy_ctor (type, tf_none);
              get_dtor (type, tf_none);
            }
        }
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}
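
/* For illustration: in

     S s;       // S has a copy constructor and destructor
     #pragma omp task
     use (s);

   's' is implicitly determined firstprivate in the task, so the
   bookkeeping above instantiates S's copy constructor and destructor now;
   by the time the region is gimplified it would be too late.  */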

/* If we might need to clean up a partially constructed object, break down the
   CONSTRUCTOR with split_nonconstant_init.  Also expand VEC_INIT_EXPR at this
   point.  If initializing TO with FROM is non-trivial, overwrite *REPLACE with
   the result.  */

static void
cp_genericize_init (tree *replace, tree from, tree to)
{
  if (TREE_CODE (from) == VEC_INIT_EXPR)
    {
      tree init = expand_vec_init_expr (to, from, tf_warning_or_error);

      /* Make cp_gimplify_init_expr call replace_decl.  */
      *replace = fold_convert (void_type_node, init);
    }
  else if (flag_exceptions
           && TREE_CODE (from) == CONSTRUCTOR
           && TREE_SIDE_EFFECTS (from)
           && TYPE_HAS_NONTRIVIAL_DESTRUCTOR (TREE_TYPE (from)))
    {
      to = cp_stabilize_reference (to);
      replace_placeholders (from, to);
      *replace = split_nonconstant_init (to, from);
    }
}
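
/* For illustration: for an aggregate like

     struct A { X x; Y y; };    // X and Y have non-trivial destructors
     A a = { make_x (), make_y () };

   split_nonconstant_init breaks the CONSTRUCTOR into member-by-member
   initializations wrapped so that if make_y () throws, the
   already-constructed 'a.x' is destroyed.  */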

/* For an INIT_EXPR, replace the INIT_EXPR itself.  */

static void
cp_genericize_init_expr (tree *stmt_p)
{
  iloc_sentinel ils = EXPR_LOCATION (*stmt_p);
  tree to = TREE_OPERAND (*stmt_p, 0);
  tree from = TREE_OPERAND (*stmt_p, 1);
  if (SIMPLE_TARGET_EXPR_P (from)
      /* Return gets confused if we clobber its INIT_EXPR this soon.  */
      && TREE_CODE (to) != RESULT_DECL)
    from = TARGET_EXPR_INITIAL (from);
  cp_genericize_init (stmt_p, from, to);
}

/* For a TARGET_EXPR, change the TARGET_EXPR_INITIAL.  We will need to use
   replace_decl later when we know what we're initializing.  */

static void
cp_genericize_target_expr (tree *stmt_p)
{
  iloc_sentinel ils = EXPR_LOCATION (*stmt_p);
  tree slot = TARGET_EXPR_SLOT (*stmt_p);
  cp_genericize_init (&TARGET_EXPR_INITIAL (*stmt_p),
                      TARGET_EXPR_INITIAL (*stmt_p), slot);
  gcc_assert (!DECL_INITIAL (slot));
}

/* Genericization context.  */

struct cp_genericize_data
{
  hash_set<tree> *p_set;
  auto_vec<tree> bind_expr_stack;
  struct cp_genericize_omp_taskreg *omp_ctx;
  tree try_block;
  bool no_sanitize_p;
  bool handle_invisiref_parm_p;
};

/* Perform any pre-gimplification folding of C++ front end trees to
   GENERIC.
   Note: The folding of non-omp cases is something to move into
   the middle-end.  For now, since most folding is done only on GENERIC
   in fold-const, we need to perform this before the transformation to
   GIMPLE form.  */

static tree
cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data_)
{
  cp_fold_data *data = (cp_fold_data*)data_;
  tree stmt = *stmt_p;
  enum tree_code code = TREE_CODE (stmt);

  switch (code)
    {
    case PTRMEM_CST:
      if (TREE_CODE (PTRMEM_CST_MEMBER (stmt)) == FUNCTION_DECL
          && DECL_IMMEDIATE_FUNCTION_P (PTRMEM_CST_MEMBER (stmt)))
        {
          if (!data->pset.add (stmt))
            error_at (PTRMEM_CST_LOCATION (stmt),
                      "taking address of an immediate function %qD",
                      PTRMEM_CST_MEMBER (stmt));
          stmt = *stmt_p = build_zero_cst (TREE_TYPE (stmt));
          break;
        }
      break;

    case ADDR_EXPR:
      if (TREE_CODE (TREE_OPERAND (stmt, 0)) == FUNCTION_DECL
          && DECL_IMMEDIATE_FUNCTION_P (TREE_OPERAND (stmt, 0)))
        {
          error_at (EXPR_LOCATION (stmt),
                    "taking address of an immediate function %qD",
                    TREE_OPERAND (stmt, 0));
          stmt = *stmt_p = build_zero_cst (TREE_TYPE (stmt));
          break;
        }
      break;

    case CALL_EXPR:
      if (tree fndecl = cp_get_callee_fndecl_nofold (stmt))
        if (DECL_IMMEDIATE_FUNCTION_P (fndecl)
            && source_location_current_p (fndecl))
          *stmt_p = stmt = cxx_constant_value (stmt);
      break;

    case VAR_DECL:
      /* In initializers replace anon union artificial VAR_DECLs
         with their DECL_VALUE_EXPRs, as nothing will do it later.
         Ditto for structured bindings.  */
      if (!data->genericize
          && DECL_HAS_VALUE_EXPR_P (stmt)
          && (DECL_ANON_UNION_VAR_P (stmt)
              || (DECL_DECOMPOSITION_P (stmt) && DECL_DECOMP_BASE (stmt))))
        {
          *stmt_p = stmt = unshare_expr (DECL_VALUE_EXPR (stmt));
          break;
        }
      break;

    default:
      break;
    }

  *stmt_p = stmt = cp_fold (*stmt_p);
  if (data->pset.add (stmt))
    {
      /* Don't walk subtrees of stmts we've already walked once, otherwise
         we can have exponential complexity with e.g. lots of nested
         SAVE_EXPRs or TARGET_EXPRs.  cp_fold uses a cache and will always
         return the same tree, whose subtrees were already walked the
         first time cp_fold_r was called on it.  */
      *walk_subtrees = 0;
      return NULL;
    }

  code = TREE_CODE (stmt);
  switch (code)
    {
      tree x;
      int i, n;
    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_LOOP:
    case OMP_TASKLOOP:
    case OACC_LOOP:
      cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
      x = OMP_FOR_COND (stmt);
      if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
        {
          cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
          cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
        }
      else if (x && TREE_CODE (x) == TREE_VEC)
        {
          n = TREE_VEC_LENGTH (x);
          for (i = 0; i < n; i++)
            {
              tree o = TREE_VEC_ELT (x, i);
              if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
                cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
            }
        }
      x = OMP_FOR_INCR (stmt);
      if (x && TREE_CODE (x) == TREE_VEC)
        {
          n = TREE_VEC_LENGTH (x);
          for (i = 0; i < n; i++)
            {
              tree o = TREE_VEC_ELT (x, i);
              if (o && TREE_CODE (o) == MODIFY_EXPR)
                o = TREE_OPERAND (o, 1);
              if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
                        || TREE_CODE (o) == POINTER_PLUS_EXPR))
                {
                  cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
                  cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
                }
            }
        }
      cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
      *walk_subtrees = 0;
      return NULL;

    case IF_STMT:
      if (IF_STMT_CONSTEVAL_P (stmt))
        {
          /* Don't walk THEN_CLAUSE (stmt) for consteval if.  IF_COND is always
             boolean_false_node.  */
          cp_walk_tree (&ELSE_CLAUSE (stmt), cp_fold_r, data, NULL);
          cp_walk_tree (&IF_SCOPE (stmt), cp_fold_r, data, NULL);
          *walk_subtrees = 0;
          return NULL;
        }
      break;

      /* These are only for genericize time; they're here rather than in
         cp_genericize to avoid problems with the invisible reference
         transition.  */
    case INIT_EXPR:
      if (data->genericize)
        cp_genericize_init_expr (stmt_p);
      break;

    case TARGET_EXPR:
      if (data->genericize)
        cp_genericize_target_expr (stmt_p);
      break;

    default:
      break;
    }

  return NULL;
}

/* Fold ALL the trees!  FIXME we should be able to remove this, but
   apparently that still causes optimization regressions.  */

void
cp_fold_function (tree fndecl)
{
  cp_fold_data data (/*genericize*/true);
  cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &data, NULL);
}

/* Turn SPACESHIP_EXPR EXPR into GENERIC.  */

static tree
genericize_spaceship (tree expr)
{
  iloc_sentinel s (cp_expr_location (expr));
  tree type = TREE_TYPE (expr);
  tree op0 = TREE_OPERAND (expr, 0);
  tree op1 = TREE_OPERAND (expr, 1);
  return genericize_spaceship (input_location, type, op0, op1);
}
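
/* For illustration: for integer operands, 'a <=> b' of type
   std::strong_ordering is lowered (by the overload called above, defined
   elsewhere) to GENERIC that selects among the less/equal/greater
   constants using ordinary comparisons, roughly

     a == b ? equal : a < b ? less : greater.  */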

/* If EXPR involves an anonymous VLA type, prepend a DECL_EXPR for that type
   to trigger gimplify_type_sizes; otherwise a cast to pointer-to-VLA confuses
   the middle-end (c++/88256).  If EXPR is a DECL, use add_stmt and return
   NULL_TREE; otherwise return a COMPOUND_STMT of the DECL_EXPR and EXPR.  */

tree
predeclare_vla (tree expr)
{
  tree type = TREE_TYPE (expr);
  if (type == error_mark_node)
    return expr;
  if (is_typedef_decl (expr))
    type = DECL_ORIGINAL_TYPE (expr);

  /* We need to strip pointers for gimplify_type_sizes.  */
  tree vla = type;
  while (POINTER_TYPE_P (vla))
    {
      if (TYPE_NAME (vla))
        return expr;
      vla = TREE_TYPE (vla);
    }
  if (vla == type || TYPE_NAME (vla)
      || !variably_modified_type_p (vla, NULL_TREE))
    return expr;

  tree decl = build_decl (input_location, TYPE_DECL, NULL_TREE, vla);
  DECL_ARTIFICIAL (decl) = 1;
  TYPE_NAME (vla) = decl;
  tree dexp = build_stmt (input_location, DECL_EXPR, decl);
  if (DECL_P (expr))
    {
      add_stmt (dexp);
      return NULL_TREE;
    }
  else
    {
      expr = build2 (COMPOUND_EXPR, type, dexp, expr);
      return expr;
    }
}
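
/* For illustration: in

     void f (int n)
     {
       using T = int (*)[n];
       T p = (T) 0;
     }

   the anonymous VLA type 'int[n]' gets an artificial DECL_EXPR prepended so
   that gimplify_type_sizes evaluates 'n' at the right spot; without it the
   cast to pointer-to-VLA used to confuse the middle end (c++/88256).  */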

/* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.  */

static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  hash_set<tree> *p_set = wtd->p_set;

  /* If in an OpenMP context, note var uses.  */
  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
      && (VAR_P (stmt)
          || TREE_CODE (stmt) == PARM_DECL
          || TREE_CODE (stmt) == RESULT_DECL)
      && omp_var_to_track (stmt))
    omp_cxx_notice_variable (wtd->omp_ctx, stmt);

  /* Don't dereference parms in a thunk, pass the references through.  */
  if ((TREE_CODE (stmt) == CALL_EXPR && call_from_lambda_thunk_p (stmt))
      || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
    {
      *walk_subtrees = 0;
      return NULL;
    }

  /* Dereference invisible reference parms.  */
  if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt))
    {
      *stmt_p = convert_from_reference (stmt);
      p_set->add (*stmt_p);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (VAR_OR_FUNCTION_DECL_P (stmt) && DECL_LOCAL_DECL_P (stmt))
    if (tree alias = DECL_LOCAL_DECL_ALIAS (stmt))
      {
        if (alias != error_mark_node)
          {
            *stmt_p = alias;
            TREE_USED (alias) |= TREE_USED (stmt);
          }
        *walk_subtrees = 0;
        return NULL;
      }

  if (TREE_CODE (stmt) == INTEGER_CST
      && TYPE_REF_P (TREE_TYPE (stmt))
      && (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
      && !wtd->no_sanitize_p)
    {
      ubsan_maybe_instrument_reference (stmt_p);
      if (*stmt_p != stmt)
        {
          *walk_subtrees = 0;
          return NULL_TREE;
        }
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (p_set->contains (stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  switch (TREE_CODE (stmt))
    {
    case ADDR_EXPR:
      if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
        {
          /* If in an OpenMP context, note var uses.  */
          if (__builtin_expect (wtd->omp_ctx != NULL, 0)
              && omp_var_to_track (TREE_OPERAND (stmt, 0)))
            omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
          *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
          *walk_subtrees = 0;
        }
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (stmt, 0) && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
        /* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
        *walk_subtrees = 0;
      break;

    case OMP_CLAUSE:
      switch (OMP_CLAUSE_CODE (stmt))
        {
        case OMP_CLAUSE_LASTPRIVATE:
          /* Don't dereference an invisiref in OpenMP clauses.  */
          if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
            {
              *walk_subtrees = 0;
              if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
                cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
                              cp_genericize_r, data, NULL);
            }
          break;
        case OMP_CLAUSE_PRIVATE:
          /* Don't dereference an invisiref in OpenMP clauses.  */
          if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
            *walk_subtrees = 0;
          else if (wtd->omp_ctx != NULL)
            {
              /* Private clause doesn't cause any references to the
                 var in outer contexts, avoid calling
                 omp_cxx_notice_variable for it.  */
              struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
              wtd->omp_ctx = NULL;
              cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
                            data, NULL);
              wtd->omp_ctx = old;
              *walk_subtrees = 0;
            }
          break;
        case OMP_CLAUSE_SHARED:
        case OMP_CLAUSE_FIRSTPRIVATE:
        case OMP_CLAUSE_COPYIN:
        case OMP_CLAUSE_COPYPRIVATE:
        case OMP_CLAUSE_INCLUSIVE:
        case OMP_CLAUSE_EXCLUSIVE:
          /* Don't dereference an invisiref in OpenMP clauses.  */
          if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
            *walk_subtrees = 0;
          break;
        case OMP_CLAUSE_REDUCTION:
        case OMP_CLAUSE_IN_REDUCTION:
        case OMP_CLAUSE_TASK_REDUCTION:
          /* Don't dereference an invisiref in reduction clause's
             OMP_CLAUSE_DECL either.  OMP_CLAUSE_REDUCTION_{INIT,MERGE}
             still needs to be genericized.  */
          if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
            {
              *walk_subtrees = 0;
              if (OMP_CLAUSE_REDUCTION_INIT (stmt))
                cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
                              cp_genericize_r, data, NULL);
              if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
                cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
                              cp_genericize_r, data, NULL);
            }
          break;
        default:
          break;
        }
      break;

      /* Due to the way voidify_wrapper_expr is written, we don't get a chance
         to lower this construct before scanning it, so we need to lower these
         before doing anything else.  */
    case CLEANUP_STMT:
      *stmt_p = build2_loc (EXPR_LOCATION (stmt),
                            CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
                                                   : TRY_FINALLY_EXPR,
                            void_type_node,
                            CLEANUP_BODY (stmt),
                            CLEANUP_EXPR (stmt));
      break;

    case IF_STMT:
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);

      /* COND_EXPR might have incompatible types in branches if one or both
         arms are bitfields.  Fix it up now.  */
    case COND_EXPR:
      {
        tree type_left
          = (TREE_OPERAND (stmt, 1)
             ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
             : NULL_TREE);
        tree type_right
          = (TREE_OPERAND (stmt, 2)
             ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
             : NULL_TREE);
        if (type_left
            && !useless_type_conversion_p (TREE_TYPE (stmt),
                                           TREE_TYPE (TREE_OPERAND (stmt, 1))))
          {
            TREE_OPERAND (stmt, 1)
              = fold_convert (type_left, TREE_OPERAND (stmt, 1));
            gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                   type_left));
          }
        if (type_right
            && !useless_type_conversion_p (TREE_TYPE (stmt),
                                           TREE_TYPE (TREE_OPERAND (stmt, 2))))
          {
            TREE_OPERAND (stmt, 2)
              = fold_convert (type_right, TREE_OPERAND (stmt, 2));
            gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
                                                   type_right));
          }
      }
      break;

    case BIND_EXPR:
      if (__builtin_expect (wtd->omp_ctx != NULL, 0))
        {
          tree decl;
          for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
            if (VAR_P (decl)
                && !DECL_EXTERNAL (decl)
                && omp_var_to_track (decl))
              {
                splay_tree_node n
                  = splay_tree_lookup (wtd->omp_ctx->variables,
                                       (splay_tree_key) decl);
                if (n == NULL)
                  splay_tree_insert (wtd->omp_ctx->variables,
                                     (splay_tree_key) decl,
                                     TREE_STATIC (decl)
                                     ? OMP_CLAUSE_DEFAULT_SHARED
                                     : OMP_CLAUSE_DEFAULT_PRIVATE);
              }
        }
      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
        {
          /* The point here is to not sanitize static initializers.  */
          bool no_sanitize_p = wtd->no_sanitize_p;
          wtd->no_sanitize_p = true;
          for (tree decl = BIND_EXPR_VARS (stmt);
               decl;
               decl = DECL_CHAIN (decl))
            if (VAR_P (decl)
                && TREE_STATIC (decl)
                && DECL_INITIAL (decl))
              cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
          wtd->no_sanitize_p = no_sanitize_p;
        }
      wtd->bind_expr_stack.safe_push (stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
                    cp_genericize_r, data, NULL);
      wtd->bind_expr_stack.pop ();
      break;

    case USING_STMT:
      {
        tree block = NULL_TREE;

        /* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
           BLOCK, and append an IMPORTED_DECL to its
           BLOCK_VARS chained list.  */
        if (wtd->bind_expr_stack.exists ())
          {
            int i;
            for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
              if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
                break;
          }
        if (block)
          {
            tree decl = TREE_OPERAND (stmt, 0);
            gcc_assert (decl);

            if (undeduced_auto_decl (decl))
              /* Omit from the GENERIC, the back-end can't handle it.  */;
            else
              {
                tree using_directive = make_node (IMPORTED_DECL);
                TREE_TYPE (using_directive) = void_type_node;
                DECL_CONTEXT (using_directive) = current_function_decl;
                DECL_SOURCE_LOCATION (using_directive)
                  = cp_expr_loc_or_input_loc (stmt);

                IMPORTED_DECL_ASSOCIATED_DECL (using_directive) = decl;
                DECL_CHAIN (using_directive) = BLOCK_VARS (block);
                BLOCK_VARS (block) = using_directive;
              }
          }
        /* The USING_STMT won't appear in GENERIC.  */
        *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
        *walk_subtrees = 0;
      }
      break;

    case DECL_EXPR:
      if (TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
        {
          /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
          *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
          *walk_subtrees = 0;
        }
      else
        {
          tree d = DECL_EXPR_DECL (stmt);
          if (VAR_P (d))
            gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
        }
      break;

    case OMP_PARALLEL:
    case OMP_TASK:
    case OMP_TASKLOOP:
      {
        struct cp_genericize_omp_taskreg omp_ctx;
        tree c, decl;
        splay_tree_node n;

        *walk_subtrees = 0;
        cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
        omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
        omp_ctx.default_shared = omp_ctx.is_parallel;
        omp_ctx.outer = wtd->omp_ctx;
        omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
        wtd->omp_ctx = &omp_ctx;
        for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
          switch (OMP_CLAUSE_CODE (c))
            {
            case OMP_CLAUSE_SHARED:
            case OMP_CLAUSE_PRIVATE:
            case OMP_CLAUSE_FIRSTPRIVATE:
            case OMP_CLAUSE_LASTPRIVATE:
              decl = OMP_CLAUSE_DECL (c);
              if (decl == error_mark_node || !omp_var_to_track (decl))
                break;
              n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
              if (n != NULL)
                break;
              splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
                                 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
                                 ? OMP_CLAUSE_DEFAULT_SHARED
                                 : OMP_CLAUSE_DEFAULT_PRIVATE);
              if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE && omp_ctx.outer)
                omp_cxx_notice_variable (omp_ctx.outer, decl);
              break;
            case OMP_CLAUSE_DEFAULT:
              if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
                omp_ctx.default_shared = true;
            default:
              break;
            }
        if (TREE_CODE (stmt) == OMP_TASKLOOP)
          c_genericize_control_stmt (stmt_p, walk_subtrees, data,
                                     cp_genericize_r, cp_walk_subtrees);
        else
          cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
        wtd->omp_ctx = omp_ctx.outer;
        splay_tree_delete (omp_ctx.variables);
      }
      break;

    case OMP_TARGET:
      cfun->has_omp_target = true;
      break;

    case TRY_BLOCK:
      {
        *walk_subtrees = 0;
        tree try_block = wtd->try_block;
        wtd->try_block = stmt;
        cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
        wtd->try_block = try_block;
        cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
      }
      break;

    case MUST_NOT_THROW_EXPR:
      /* MUST_NOT_THROW_COND might be something else with TM.  */
      if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
        {
          *walk_subtrees = 0;
          tree try_block = wtd->try_block;
          wtd->try_block = stmt;
          cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
          wtd->try_block = try_block;
        }
      break;

    case THROW_EXPR:
      {
        location_t loc = location_of (stmt);
        if (warning_suppressed_p (stmt /* What warning?  */))
          /* Never mind.  */;
        else if (wtd->try_block)
          {
            if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR)
              {
                auto_diagnostic_group d;
                if (warning_at (loc, OPT_Wterminate,
                                "%<throw%> will always call %<terminate%>")
                    && cxx_dialect >= cxx11
                    && DECL_DESTRUCTOR_P (current_function_decl))
                  inform (loc, "in C++11 destructors default to %<noexcept%>");
              }
          }
        else
          {
            if (warn_cxx11_compat && cxx_dialect < cxx11
                && DECL_DESTRUCTOR_P (current_function_decl)
                && (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
                    == NULL_TREE)
                && (get_defaulted_eh_spec (current_function_decl)
                    == empty_except_spec))
              warning_at (loc, OPT_Wc__11_compat,
                          "in C++11 this %<throw%> will call %<terminate%> "
                          "because destructors default to %<noexcept%>");
          }
      }
      break;

    case CONVERT_EXPR:
      gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
      break;

    case SPACESHIP_EXPR:
      *stmt_p = genericize_spaceship (*stmt_p);
      break;

    case PTRMEM_CST:
      /* By the time we get here we're handing off to the back end, so we don't
         need or want to preserve PTRMEM_CST anymore.  */
      *stmt_p = cplus_expand_constant (stmt);
      *walk_subtrees = 0;
      break;

    case MEM_REF:
      /* For MEM_REF, make sure not to sanitize the second operand even
         if it has reference type.  It is just an offset with a type
         holding other information.  There is no other processing we
         need to do for INTEGER_CSTs, so just ignore the second argument
         unconditionally.  */
      cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
      *walk_subtrees = 0;
      break;

    case NOP_EXPR:
      *stmt_p = predeclare_vla (*stmt_p);
      if (!wtd->no_sanitize_p
          && sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT)
          && TYPE_REF_P (TREE_TYPE (stmt)))
        ubsan_maybe_instrument_reference (stmt_p);
      break;

    case CALL_EXPR:
      /* Evaluate function concept checks instead of treating them as
         normal functions.  */
      if (concept_check_p (stmt))
        {
          *stmt_p = evaluate_concept_check (stmt);
          *walk_subtrees = 0;
          break;
        }

      if (!wtd->no_sanitize_p
          && sanitize_flags_p ((SANITIZE_NULL
                                | SANITIZE_ALIGNMENT | SANITIZE_VPTR)))
        {
          tree fn = CALL_EXPR_FN (stmt);
          if (fn != NULL_TREE
              && !error_operand_p (fn)
              && INDIRECT_TYPE_P (TREE_TYPE (fn))
              && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
            {
              bool is_ctor
                = TREE_CODE (fn) == ADDR_EXPR
                  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
                  && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
              if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT))
                ubsan_maybe_instrument_member_call (stmt, is_ctor);
              if (sanitize_flags_p (SANITIZE_VPTR) && !is_ctor)
                cp_ubsan_maybe_instrument_member_call (stmt);
            }
          else if (fn == NULL_TREE
                   && CALL_EXPR_IFN (stmt) == IFN_UBSAN_NULL
                   && TREE_CODE (CALL_EXPR_ARG (stmt, 0)) == INTEGER_CST
                   && TYPE_REF_P (TREE_TYPE (CALL_EXPR_ARG (stmt, 0))))
            *walk_subtrees = 0;
        }
      /* Fall through.  */
    case AGGR_INIT_EXPR:
      /* For calls to a multi-versioned function, overload resolution
         returns the function with the highest target priority, that is,
         the version that will be checked for dispatching first.  If this
         version is inlinable, a direct call to this version can be made;
         otherwise the call should go through the dispatcher.  */
      {
        tree fn = cp_get_callee_fndecl_nofold (stmt);
        if (fn && DECL_FUNCTION_VERSIONED (fn)
            && (current_function_decl == NULL
                || !targetm.target_option.can_inline_p (current_function_decl,
                                                        fn)))
          if (tree dis = get_function_version_dispatcher (fn))
            {
              mark_versions_used (dis);
              dis = build_address (dis);
              if (TREE_CODE (stmt) == CALL_EXPR)
                CALL_EXPR_FN (stmt) = dis;
              else
                AGGR_INIT_EXPR_FN (stmt) = dis;
            }
      }
      break;

    case TARGET_EXPR:
      if (TARGET_EXPR_INITIAL (stmt)
          && TREE_CODE (TARGET_EXPR_INITIAL (stmt)) == CONSTRUCTOR
          && CONSTRUCTOR_PLACEHOLDER_BOUNDARY (TARGET_EXPR_INITIAL (stmt)))
        TARGET_EXPR_NO_ELIDE (stmt) = 1;
      break;

    case TEMPLATE_ID_EXPR:
      gcc_assert (concept_check_p (stmt));
      /* Emit the value of the concept check.  */
      *stmt_p = evaluate_concept_check (stmt);
      *walk_subtrees = 0;
      break;

    case OMP_DISTRIBUTE:
      /* Need to explicitly instantiate copy ctors on class iterators of
         composite distribute parallel for.  */
      if (OMP_FOR_INIT (*stmt_p) == NULL_TREE)
        {
          tree *data[4] = { NULL, NULL, NULL, NULL };
          tree inner = walk_tree (&OMP_FOR_BODY (*stmt_p),
                                  find_combined_omp_for, data, NULL);
          if (inner != NULL_TREE
              && TREE_CODE (inner) == OMP_FOR)
            {
              for (int i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner)); i++)
                if (OMP_FOR_ORIG_DECLS (inner)
                    && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner),
                                                i)) == TREE_LIST
                    && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner),
                                                   i)))
                  {
                    tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner), i);
                    /* Class iterators aren't allowed on OMP_SIMD, so the only
                       case we need to solve is distribute parallel for.  */
                    gcc_assert (TREE_CODE (inner) == OMP_FOR
                                && data[1]);
                    tree orig_decl = TREE_PURPOSE (orig);
                    tree c, cl = NULL_TREE;
                    for (c = OMP_FOR_CLAUSES (inner);
                         c; c = OMP_CLAUSE_CHAIN (c))
                      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
                           || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
                          && OMP_CLAUSE_DECL (c) == orig_decl)
                        {
                          cl = c;
                          break;
                        }
                    if (cl == NULL_TREE)
                      {
                        for (c = OMP_PARALLEL_CLAUSES (*data[1]);
                             c; c = OMP_CLAUSE_CHAIN (c))
                          if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
                              && OMP_CLAUSE_DECL (c) == orig_decl)
                            {
                              cl = c;
                              break;
                            }
                      }
                    if (cl)
                      {
                        orig_decl = require_complete_type (orig_decl);
                        tree inner_type = TREE_TYPE (orig_decl);
                        if (orig_decl == error_mark_node)
                          continue;
                        if (TYPE_REF_P (TREE_TYPE (orig_decl)))
                          inner_type = TREE_TYPE (inner_type);

                        while (TREE_CODE (inner_type) == ARRAY_TYPE)
                          inner_type = TREE_TYPE (inner_type);
                        get_copy_ctor (inner_type, tf_warning_or_error);
                      }
                  }
            }
        }
      /* FALLTHRU */

    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
    case OMP_FOR:
    case OMP_SIMD:
    case OMP_LOOP:
    case OACC_LOOP:
    case STATEMENT_LIST:
      /* These cases are handled by shared code.  */
      c_genericize_control_stmt (stmt_p, walk_subtrees, data,
                                 cp_genericize_r, cp_walk_subtrees);
      break;

    case BIT_CAST_EXPR:
      *stmt_p = build1_loc (EXPR_LOCATION (stmt), VIEW_CONVERT_EXPR,
                            TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
      break;

    default:
      if (IS_TYPE_OR_DECL_P (stmt))
        *walk_subtrees = 0;
      break;
    }

  p_set->add (*stmt_p);

  return NULL;
}

/* Lower C++ front end trees to GENERIC in T_P.  */

static void
cp_genericize_tree (tree* t_p, bool handle_invisiref_parm_p)
{
  struct cp_genericize_data wtd;

  wtd.p_set = new hash_set<tree>;
  wtd.bind_expr_stack.create (0);
  wtd.omp_ctx = NULL;
  wtd.try_block = NULL_TREE;
  wtd.no_sanitize_p = false;
  wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
  cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
  delete wtd.p_set;
  if (sanitize_flags_p (SANITIZE_VPTR))
    cp_ubsan_instrument_member_accesses (t_p);
}
1818
1819 /* If a non-void function doesn't obviously end with a return
1820    statement, add ubsan instrumentation code to verify at runtime
1821    that control doesn't fall off its end.  If -fsanitize=return
1822    is not enabled, instrument __builtin_unreachable instead.  */
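
/* For instance (hypothetical user code), with -fsanitize=return the
   definition

     int f (int x) { if (x) return 1; }

   can fall off its end when X is zero; the instrumentation appended below
   then reports the missing return at runtime.  */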
1823
1824 static void
1825 cp_maybe_instrument_return (tree fndecl)
1826 {
1827 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
1828 || DECL_CONSTRUCTOR_P (fndecl)
1829 || DECL_DESTRUCTOR_P (fndecl)
1830 || !targetm.warn_func_return (fndecl))
1831 return;
1832
1833 if (!sanitize_flags_p (SANITIZE_RETURN, fndecl)
1834       /* Don't add __builtin_unreachable () when not optimizing: it won't
1835 	 enable any optimizations in that case and would just break code
1836 	 relying on UB.  Don't add it for -fsanitize=unreachable
1837 	 -fno-sanitize=return either: UBSan covers that case with
1838 	 ubsan_instrument_return above, where sufficient location
1839 	 information is available, whereas the __builtin_unreachable ()
1840 	 below would just yield a hard to understand runtime error with no location.  */
1841 && (!optimize
1842 || sanitize_flags_p (SANITIZE_UNREACHABLE, fndecl)))
1843 return;
1844
1845 tree t = DECL_SAVED_TREE (fndecl);
1846 while (t)
1847 {
1848 switch (TREE_CODE (t))
1849 {
1850 case BIND_EXPR:
1851 t = BIND_EXPR_BODY (t);
1852 continue;
1853 case TRY_FINALLY_EXPR:
1854 case CLEANUP_POINT_EXPR:
1855 t = TREE_OPERAND (t, 0);
1856 continue;
1857 case STATEMENT_LIST:
1858 {
1859 tree_stmt_iterator i = tsi_last (t);
1860 while (!tsi_end_p (i))
1861 {
1862 tree p = tsi_stmt (i);
1863 if (TREE_CODE (p) != DEBUG_BEGIN_STMT)
1864 break;
1865 tsi_prev (&i);
1866 }
1867 if (!tsi_end_p (i))
1868 {
1869 t = tsi_stmt (i);
1870 continue;
1871 }
1872 }
1873 break;
1874 case RETURN_EXPR:
1875 return;
1876 default:
1877 break;
1878 }
1879 break;
1880 }
1881 if (t == NULL_TREE)
1882 return;
1883 tree *p = &DECL_SAVED_TREE (fndecl);
1884 if (TREE_CODE (*p) == BIND_EXPR)
1885 p = &BIND_EXPR_BODY (*p);
1886
1887 location_t loc = DECL_SOURCE_LOCATION (fndecl);
1888 if (sanitize_flags_p (SANITIZE_RETURN, fndecl))
1889 t = ubsan_instrument_return (loc);
1890 else
1891 {
1892 tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
1893 t = build_call_expr_loc (BUILTINS_LOCATION, fndecl, 0);
1894 }
1895
1896 append_to_statement_list (t, p);
1897 }
1898
1899 void
1900 cp_genericize (tree fndecl)
1901 {
1902 tree t;
1903
1904 /* Fix up the types of parms passed by invisible reference. */
1905 for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
1906 if (TREE_ADDRESSABLE (TREE_TYPE (t)))
1907 {
1908 /* If a function's arguments are copied to create a thunk,
1909 then DECL_BY_REFERENCE will be set -- but the type of the
1910 argument will be a pointer type, so we will never get
1911 here. */
1912 gcc_assert (!DECL_BY_REFERENCE (t));
1913 gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
1914 TREE_TYPE (t) = DECL_ARG_TYPE (t);
1915 DECL_BY_REFERENCE (t) = 1;
1916 TREE_ADDRESSABLE (t) = 0;
1917 relayout_decl (t);
1918 }
1919
1920 /* Do the same for the return value. */
1921 if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
1922 {
1923 t = DECL_RESULT (fndecl);
1924 TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
1925 DECL_BY_REFERENCE (t) = 1;
1926 TREE_ADDRESSABLE (t) = 0;
1927 relayout_decl (t);
1928 if (DECL_NAME (t))
1929 {
1930 /* Adjust DECL_VALUE_EXPR of the original var. */
1931 tree outer = outer_curly_brace_block (current_function_decl);
1932 tree var;
1933
1934 if (outer)
1935 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
1936 if (VAR_P (var)
1937 && DECL_NAME (t) == DECL_NAME (var)
1938 && DECL_HAS_VALUE_EXPR_P (var)
1939 && DECL_VALUE_EXPR (var) == t)
1940 {
1941 tree val = convert_from_reference (t);
1942 SET_DECL_VALUE_EXPR (var, val);
1943 break;
1944 }
1945 }
1946 }
1947
1948 /* If we're a clone, the body is already GIMPLE. */
1949 if (DECL_CLONED_FUNCTION_P (fndecl))
1950 return;
1951
1952 /* Allow cp_genericize calls to be nested. */
1953 bc_state_t save_state;
1954 save_bc_state (&save_state);
1955
1956 /* We do want to see every occurrence of the parms, so we can't just use
1957 walk_tree's hash functionality. */
1958 cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);
1959
1960 cp_maybe_instrument_return (fndecl);
1961
1962 /* Do everything else. */
1963 c_genericize (fndecl);
1964 restore_bc_state (&save_state);
1965 }
1966
1967 /* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
1968    NULL if there is in fact nothing to do.  ARG2 may be NULL if FN
1969    actually only takes one argument.  */
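
/* For array operands the code built below amounts, roughly, to

     p1 = &arg1[0]...[0];  end1 = p1 + sizeof (arg1);
     if (arg2) p2 = &arg2[0]...[0];
   lab:
     fn (p1, p2, <default args>);
     p1 += sizeof (element);  if (arg2) p2 += sizeof (element);
     if (p1 != end1) goto lab;

   which is a sketch of the generated GENERIC, not literal source.  */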
1970
1971 static tree
1972 cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
1973 {
1974 tree defparm, parm, t;
1975 int i = 0;
1976 int nargs;
1977 tree *argarray;
1978
1979 if (fn == NULL)
1980 return NULL;
1981
1982 nargs = list_length (DECL_ARGUMENTS (fn));
1983 argarray = XALLOCAVEC (tree, nargs);
1984
1985 defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
1986 if (arg2)
1987 defparm = TREE_CHAIN (defparm);
1988
1989 bool is_method = TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE;
1990 if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
1991 {
1992 tree inner_type = TREE_TYPE (arg1);
1993 tree start1, end1, p1;
1994 tree start2 = NULL, p2 = NULL;
1995 tree ret = NULL, lab;
1996
1997 start1 = arg1;
1998 start2 = arg2;
1999 do
2000 {
2001 inner_type = TREE_TYPE (inner_type);
2002 start1 = build4 (ARRAY_REF, inner_type, start1,
2003 size_zero_node, NULL, NULL);
2004 if (arg2)
2005 start2 = build4 (ARRAY_REF, inner_type, start2,
2006 size_zero_node, NULL, NULL);
2007 }
2008 while (TREE_CODE (inner_type) == ARRAY_TYPE);
2009 start1 = build_fold_addr_expr_loc (input_location, start1);
2010 if (arg2)
2011 start2 = build_fold_addr_expr_loc (input_location, start2);
2012
2013 end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
2014 end1 = fold_build_pointer_plus (start1, end1);
2015
2016 p1 = create_tmp_var (TREE_TYPE (start1));
2017 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
2018 append_to_statement_list (t, &ret);
2019
2020 if (arg2)
2021 {
2022 p2 = create_tmp_var (TREE_TYPE (start2));
2023 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
2024 append_to_statement_list (t, &ret);
2025 }
2026
2027 lab = create_artificial_label (input_location);
2028 t = build1 (LABEL_EXPR, void_type_node, lab);
2029 append_to_statement_list (t, &ret);
2030
2031 argarray[i++] = p1;
2032 if (arg2)
2033 argarray[i++] = p2;
2034 /* Handle default arguments. */
2035 for (parm = defparm; parm && parm != void_list_node;
2036 parm = TREE_CHAIN (parm), i++)
2037 argarray[i] = convert_default_arg (TREE_VALUE (parm),
2038 TREE_PURPOSE (parm), fn,
2039 i - is_method, tf_warning_or_error);
2040 t = build_call_a (fn, i, argarray);
2041 if (MAYBE_CLASS_TYPE_P (TREE_TYPE (t)))
2042 t = build_cplus_new (TREE_TYPE (t), t, tf_warning_or_error);
2043 t = fold_convert (void_type_node, t);
2044 t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
2045 append_to_statement_list (t, &ret);
2046
2047 t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
2048 t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
2049 append_to_statement_list (t, &ret);
2050
2051 if (arg2)
2052 {
2053 t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
2054 t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
2055 append_to_statement_list (t, &ret);
2056 }
2057
2058 t = build2 (NE_EXPR, boolean_type_node, p1, end1);
2059 t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
2060 append_to_statement_list (t, &ret);
2061
2062 return ret;
2063 }
2064 else
2065 {
2066 argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
2067 if (arg2)
2068 argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
2069 /* Handle default arguments. */
2070 for (parm = defparm; parm && parm != void_list_node;
2071 parm = TREE_CHAIN (parm), i++)
2072 argarray[i] = convert_default_arg (TREE_VALUE (parm),
2073 TREE_PURPOSE (parm), fn,
2074 i - is_method, tf_warning_or_error);
2075 t = build_call_a (fn, i, argarray);
2076 if (MAYBE_CLASS_TYPE_P (TREE_TYPE (t)))
2077 t = build_cplus_new (TREE_TYPE (t), t, tf_warning_or_error);
2078 t = fold_convert (void_type_node, t);
2079 return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
2080 }
2081 }
2082
2083 /* Return code to initialize DECL with its default constructor, or
2084 NULL if there's nothing to do. */
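
/* As used here and in the hooks below, CP_OMP_CLAUSE_INFO is assumed to be
   a three-element TREE_VEC: slot 0 holds the default or copy constructor,
   slot 1 the destructor and slot 2 the assignment operator, matching the
   TREE_VEC_ELT indices in this file.  */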
2085
2086 tree
2087 cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
2088 {
2089 tree info = CP_OMP_CLAUSE_INFO (clause);
2090 tree ret = NULL;
2091
2092 if (info)
2093 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);
2094
2095 return ret;
2096 }
2097
2098 /* Return code to initialize DST with a copy constructor from SRC. */
2099
2100 tree
2101 cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
2102 {
2103 tree info = CP_OMP_CLAUSE_INFO (clause);
2104 tree ret = NULL;
2105
2106 if (info)
2107 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
2108 if (ret == NULL)
2109 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
2110
2111 return ret;
2112 }
2113
2114 /* Similarly, except use an assignment operator instead. */
2115
2116 tree
2117 cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
2118 {
2119 tree info = CP_OMP_CLAUSE_INFO (clause);
2120 tree ret = NULL;
2121
2122 if (info)
2123 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
2124 if (ret == NULL)
2125 ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
2126
2127 return ret;
2128 }
2129
2130 /* Return code to destroy DECL. */
2131
2132 tree
2133 cxx_omp_clause_dtor (tree clause, tree decl)
2134 {
2135 tree info = CP_OMP_CLAUSE_INFO (clause);
2136 tree ret = NULL;
2137
2138 if (info)
2139 ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);
2140
2141 return ret;
2142 }
2143
2144 /* True if OpenMP should privatize what this DECL points to rather
2145 than the DECL itself. */
2146
2147 bool
2148 cxx_omp_privatize_by_reference (const_tree decl)
2149 {
2150 return (TYPE_REF_P (TREE_TYPE (decl))
2151 || is_invisiref_parm (decl));
2152 }
2153
2154 /* Return true if DECL is a const-qualified var having no mutable member.  */
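/* For instance, given the (hypothetical) type 'struct S { mutable int m; };',
   a variable of type 'const S' does not qualify, since its mutable member
   can still be modified.  */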
2155 bool
2156 cxx_omp_const_qual_no_mutable (tree decl)
2157 {
2158 tree type = TREE_TYPE (decl);
2159 if (TYPE_REF_P (type))
2160 {
2161 if (!is_invisiref_parm (decl))
2162 return false;
2163 type = TREE_TYPE (type);
2164
2165 if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
2166 {
2167 /* NVR doesn't preserve const qualification of the
2168 variable's type. */
2169 tree outer = outer_curly_brace_block (current_function_decl);
2170 tree var;
2171
2172 if (outer)
2173 for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
2174 if (VAR_P (var)
2175 && DECL_NAME (decl) == DECL_NAME (var)
2176 && (TYPE_MAIN_VARIANT (type)
2177 == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
2178 {
2179 if (TYPE_READONLY (TREE_TYPE (var)))
2180 type = TREE_TYPE (var);
2181 break;
2182 }
2183 }
2184 }
2185
2186 if (type == error_mark_node)
2187 return false;
2188
2189 /* Variables with const-qualified type having no mutable member
2190 are predetermined shared. */
2191 if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
2192 return true;
2193
2194 return false;
2195 }
2196
2197 /* Return OMP_CLAUSE_DEFAULT_UNSPECIFIED unless the OpenMP sharing
2198    attribute of DECL is predetermined.  */
2199
2200 enum omp_clause_default_kind
2201 cxx_omp_predetermined_sharing_1 (tree decl)
2202 {
2203 /* Static data members are predetermined shared. */
2204 if (TREE_STATIC (decl))
2205 {
2206 tree ctx = CP_DECL_CONTEXT (decl);
2207 if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
2208 return OMP_CLAUSE_DEFAULT_SHARED;
2209
2210 if (c_omp_predefined_variable (decl))
2211 return OMP_CLAUSE_DEFAULT_SHARED;
2212 }
2213
2214   /* 'this' may not be specified in data-sharing clauses, yet we still
2215      need to predetermine it firstprivate.  */
2216 if (decl == current_class_ptr)
2217 return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
2218
2219 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2220 }
2221
2222 /* Likewise, but also include the artificial vars. We don't want to
2223 disallow the artificial vars being mentioned in explicit clauses,
2224 as we use artificial vars e.g. for loop constructs with random
2225 access iterators other than pointers, but during gimplification
2226 we want to treat them as predetermined. */
2227
2228 enum omp_clause_default_kind
2229 cxx_omp_predetermined_sharing (tree decl)
2230 {
2231 enum omp_clause_default_kind ret = cxx_omp_predetermined_sharing_1 (decl);
2232 if (ret != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
2233 return ret;
2234
2235   /* Predetermine artificial variables holding integral values; those
2236      are usually the result of gimplify_one_sizepos or SAVE_EXPR
2237      gimplification.  */
2238 if (VAR_P (decl)
2239 && DECL_ARTIFICIAL (decl)
2240 && INTEGRAL_TYPE_P (TREE_TYPE (decl))
2241 && !(DECL_LANG_SPECIFIC (decl)
2242 && DECL_OMP_PRIVATIZED_MEMBER (decl)))
2243 return OMP_CLAUSE_DEFAULT_SHARED;
2244
2245 /* Similarly for typeinfo symbols. */
2246 if (VAR_P (decl) && DECL_ARTIFICIAL (decl) && DECL_TINFO_P (decl))
2247 return OMP_CLAUSE_DEFAULT_SHARED;
2248
2249 return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
2250 }
2251
2252 enum omp_clause_defaultmap_kind
2253 cxx_omp_predetermined_mapping (tree decl)
2254 {
2255   /* Predetermine artificial variables holding integral values; those
2256      are usually the result of gimplify_one_sizepos or SAVE_EXPR
2257      gimplification.  */
2258 if (VAR_P (decl)
2259 && DECL_ARTIFICIAL (decl)
2260 && INTEGRAL_TYPE_P (TREE_TYPE (decl))
2261 && !(DECL_LANG_SPECIFIC (decl)
2262 && DECL_OMP_PRIVATIZED_MEMBER (decl)))
2263 return OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE;
2264
2265 if (c_omp_predefined_variable (decl))
2266 return OMP_CLAUSE_DEFAULTMAP_TO;
2267
2268 return OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED;
2269 }
2270
2271 /* Finalize an implicitly determined clause. */
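
/* For an implicitly determined clause of class type this builds (and
   caches) the special member functions it needs; if that fails, e.g. for
   a deleted copy constructor, the clause is downgraded to shared below.  */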
2272
2273 void
2274 cxx_omp_finish_clause (tree c, gimple_seq *, bool /* openacc */)
2275 {
2276 tree decl, inner_type;
2277 bool make_shared = false;
2278
2279 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE
2280 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE
2281 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LASTPRIVATE
2282 || !OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)))
2283 return;
2284
2285 decl = OMP_CLAUSE_DECL (c);
2286 decl = require_complete_type (decl);
2287 inner_type = TREE_TYPE (decl);
2288 if (decl == error_mark_node)
2289 make_shared = true;
2290 else if (TYPE_REF_P (TREE_TYPE (decl)))
2291 inner_type = TREE_TYPE (inner_type);
2292
2293 /* We're interested in the base element, not arrays. */
2294 while (TREE_CODE (inner_type) == ARRAY_TYPE)
2295 inner_type = TREE_TYPE (inner_type);
2296
2297 /* Check for special function availability by building a call to one.
2298 Save the results, because later we won't be in the right context
2299 for making these queries. */
2300 bool first = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE;
2301 bool last = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE;
2302 if (!make_shared
2303 && CLASS_TYPE_P (inner_type)
2304 && cxx_omp_create_clause_info (c, inner_type, !first, first, last,
2305 true))
2306 make_shared = true;
2307
2308 if (make_shared)
2309 {
2310 OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
2311 OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) = 0;
2312 OMP_CLAUSE_SHARED_READONLY (c) = 0;
2313 }
2314 }
2315
2316 /* Return true if DECL's DECL_VALUE_EXPR (if any) should be
2317 disregarded in OpenMP construct, because it is going to be
2318 remapped during OpenMP lowering. SHARED is true if DECL
2319 is going to be shared, false if it is going to be privatized. */
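
/* E.g. when a non-static data member M is listed in a data-sharing clause
   inside a member function, the front end uses an artificial VAR_DECL
   whose DECL_VALUE_EXPR is 'this->M'; once that variable is privatized,
   the value-expr mapping must be ignored (see
   DECL_OMP_PRIVATIZED_MEMBER).  */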
2320
2321 bool
2322 cxx_omp_disregard_value_expr (tree decl, bool shared)
2323 {
2324 if (shared)
2325 return false;
2326 if (VAR_P (decl)
2327 && DECL_HAS_VALUE_EXPR_P (decl)
2328 && DECL_ARTIFICIAL (decl)
2329 && DECL_LANG_SPECIFIC (decl)
2330 && DECL_OMP_PRIVATIZED_MEMBER (decl))
2331 return true;
2332 if (VAR_P (decl) && DECL_CONTEXT (decl) && is_capture_proxy (decl))
2333 return true;
2334 return false;
2335 }
2336
2337 /* Fold expression X which is used as an rvalue if RVAL is true. */
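
/* For instance, given 'const int n = 4;', an rvalue use of N folds to the
   INTEGER_CST 4 via decl_constant_value below, while an lvalue use of N is
   left alone.  */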
2338
2339 tree
2340 cp_fold_maybe_rvalue (tree x, bool rval)
2341 {
2342 while (true)
2343 {
2344 x = cp_fold (x);
2345 if (rval)
2346 x = mark_rvalue_use (x);
2347 if (rval && DECL_P (x)
2348 && !TYPE_REF_P (TREE_TYPE (x)))
2349 {
2350 tree v = decl_constant_value (x);
2351 if (v != x && v != error_mark_node)
2352 {
2353 x = v;
2354 continue;
2355 }
2356 }
2357 break;
2358 }
2359 return x;
2360 }
2361
2362 /* Fold expression X which is used as an rvalue. */
2363
2364 tree
2365 cp_fold_rvalue (tree x)
2366 {
2367 return cp_fold_maybe_rvalue (x, true);
2368 }
2369
2370 /* Perform folding on expression X. */
2371
2372 tree
2373 cp_fully_fold (tree x)
2374 {
2375 if (processing_template_decl)
2376 return x;
2377 /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't
2378 have to call both. */
2379 if (cxx_dialect >= cxx11)
2380 {
2381 x = maybe_constant_value (x);
2382 /* Sometimes we are given a CONSTRUCTOR but the call above wraps it into
2383 a TARGET_EXPR; undo that here. */
2384 if (TREE_CODE (x) == TARGET_EXPR)
2385 x = TARGET_EXPR_INITIAL (x);
2386 else if (TREE_CODE (x) == VIEW_CONVERT_EXPR
2387 && TREE_CODE (TREE_OPERAND (x, 0)) == CONSTRUCTOR
2388 && TREE_TYPE (TREE_OPERAND (x, 0)) == TREE_TYPE (x))
2389 x = TREE_OPERAND (x, 0);
2390 }
2391 return cp_fold_rvalue (x);
2392 }
2393
2394 /* Likewise, but also fold recursively, which cp_fully_fold doesn't perform
2395 in some cases. */
2396
2397 tree
2398 cp_fully_fold_init (tree x)
2399 {
2400 if (processing_template_decl)
2401 return x;
2402 x = cp_fully_fold (x);
2403 cp_fold_data data (/*genericize*/false);
2404 cp_walk_tree (&x, cp_fold_r, &data, NULL);
2405 return x;
2406 }
2407
2408 /* c-common interface to cp_fold. If IN_INIT, this is in a static initializer
2409 and certain changes are made to the folding done. Or should be (FIXME). We
2410 never touch maybe_const, as it is only used for the C front-end
2411 C_MAYBE_CONST_EXPR. */
2412
2413 tree
2414 c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
2415 {
2416 return cp_fold_maybe_rvalue (x, !lval);
2417 }
2418
2419 static GTY((deletable)) hash_map<tree, tree> *fold_cache;
2420
2421 /* Dispose of the whole FOLD_CACHE. */
2422
2423 void
2424 clear_fold_cache (void)
2425 {
2426 if (fold_cache != NULL)
2427 fold_cache->empty ();
2428 }
2429
2430 /* This function tries to fold an expression X.
2431 To avoid combinatorial explosion, folding results are kept in fold_cache.
2432 If X is invalid, we don't fold at all.
2433 For performance reasons we don't cache expressions representing a
2434 declaration or constant.
2435    The function returns X or its folded variant.  */
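
/* For example, folding 'sizeof (int) + 1' yields an INTEGER_CST; the
   result is memoized in FOLD_CACHE keyed by the original tree, so
   revisiting the same tree is a hash lookup rather than a refold.  */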
2436
2437 static tree
2438 cp_fold (tree x)
2439 {
2440 tree op0, op1, op2, op3;
2441 tree org_x = x, r = NULL_TREE;
2442 enum tree_code code;
2443 location_t loc;
2444 bool rval_ops = true;
2445
2446 if (!x || x == error_mark_node)
2447 return x;
2448
2449 if (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node))
2450 return x;
2451
2452 /* Don't bother to cache DECLs or constants. */
2453 if (DECL_P (x) || CONSTANT_CLASS_P (x))
2454 return x;
2455
2456 if (fold_cache == NULL)
2457 fold_cache = hash_map<tree, tree>::create_ggc (101);
2458
2459 if (tree *cached = fold_cache->get (x))
2460 {
2461 /* unshare_expr doesn't recurse into SAVE_EXPRs. If SAVE_EXPR's
2462 argument has been folded into a tree invariant, make sure it is
2463 unshared. See PR112727. */
2464 if (TREE_CODE (x) == SAVE_EXPR && *cached != x)
2465 return unshare_expr (*cached);
2466 return *cached;
2467 }
2468
2469 uid_sensitive_constexpr_evaluation_checker c;
2470
2471 code = TREE_CODE (x);
2472 switch (code)
2473 {
2474 case CLEANUP_POINT_EXPR:
2475 /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
2476 effects. */
2477 r = cp_fold_rvalue (TREE_OPERAND (x, 0));
2478 if (!TREE_SIDE_EFFECTS (r))
2479 x = r;
2480 break;
2481
2482 case SIZEOF_EXPR:
2483 x = fold_sizeof_expr (x);
2484 break;
2485
2486 case VIEW_CONVERT_EXPR:
2487 rval_ops = false;
2488 /* FALLTHRU */
2489 case CONVERT_EXPR:
2490 case NOP_EXPR:
2491 case NON_LVALUE_EXPR:
2492
2493 if (VOID_TYPE_P (TREE_TYPE (x)))
2494 {
2495 /* This is just to make sure we don't end up with casts to
2496 void from error_mark_node. If we just return x, then
2497 cp_fold_r might fold the operand into error_mark_node and
2498 leave the conversion in the IR. STRIP_USELESS_TYPE_CONVERSION
2499 during gimplification doesn't like such casts.
2500 	     Don't create a new tree if op0 != TREE_OPERAND (x, 0); the
2501 	     folding of the operand should already be in the caches, and
2502 	     if called from cp_fold_r it will modify the operand in place.  */
2503 op0 = cp_fold (TREE_OPERAND (x, 0));
2504 if (op0 == error_mark_node)
2505 x = error_mark_node;
2506 break;
2507 }
2508
2509 loc = EXPR_LOCATION (x);
2510 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2511
2512 if (code == CONVERT_EXPR
2513 && SCALAR_TYPE_P (TREE_TYPE (x))
2514 && op0 != void_node)
2515 /* During parsing we used convert_to_*_nofold; re-convert now using the
2516 folding variants, since fold() doesn't do those transformations. */
2517 x = fold (convert (TREE_TYPE (x), op0));
2518 else if (op0 != TREE_OPERAND (x, 0))
2519 {
2520 if (op0 == error_mark_node)
2521 x = error_mark_node;
2522 else
2523 x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2524 }
2525 else
2526 x = fold (x);
2527
2528 /* Conversion of an out-of-range value has implementation-defined
2529 behavior; the language considers it different from arithmetic
2530 overflow, which is undefined. */
2531 if (TREE_CODE (op0) == INTEGER_CST
2532 && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
2533 TREE_OVERFLOW (x) = false;
2534
2535 break;
2536
2537 case INDIRECT_REF:
2538 /* We don't need the decltype(auto) obfuscation anymore. */
2539 if (REF_PARENTHESIZED_P (x))
2540 {
2541 tree p = maybe_undo_parenthesized_ref (x);
2542 if (p != x)
2543 return cp_fold (p);
2544 }
2545 goto unary;
2546
2547 case ADDR_EXPR:
2548 loc = EXPR_LOCATION (x);
2549 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), false);
2550
2551 /* Cope with user tricks that amount to offsetof. */
2552 if (op0 != error_mark_node
2553 && !FUNC_OR_METHOD_TYPE_P (TREE_TYPE (op0)))
2554 {
2555 tree val = get_base_address (op0);
2556 if (val
2557 && INDIRECT_REF_P (val)
2558 && COMPLETE_TYPE_P (TREE_TYPE (val))
2559 && TREE_CONSTANT (TREE_OPERAND (val, 0)))
2560 {
2561 val = TREE_OPERAND (val, 0);
2562 STRIP_NOPS (val);
2563 val = maybe_constant_value (val);
2564 if (TREE_CODE (val) == INTEGER_CST)
2565 return fold_offsetof (op0, TREE_TYPE (x));
2566 }
2567 }
2568 goto finish_unary;
2569
2570 case REALPART_EXPR:
2571 case IMAGPART_EXPR:
2572 rval_ops = false;
2573 /* FALLTHRU */
2574 case CONJ_EXPR:
2575 case FIX_TRUNC_EXPR:
2576 case FLOAT_EXPR:
2577 case NEGATE_EXPR:
2578 case ABS_EXPR:
2579 case ABSU_EXPR:
2580 case BIT_NOT_EXPR:
2581 case TRUTH_NOT_EXPR:
2582 case FIXED_CONVERT_EXPR:
2583 unary:
2584
2585 loc = EXPR_LOCATION (x);
2586 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2587
2588 finish_unary:
2589 if (op0 != TREE_OPERAND (x, 0))
2590 {
2591 if (op0 == error_mark_node)
2592 x = error_mark_node;
2593 else
2594 {
2595 x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
2596 if (code == INDIRECT_REF
2597 && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
2598 {
2599 TREE_READONLY (x) = TREE_READONLY (org_x);
2600 TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
2601 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
2602 }
2603 }
2604 }
2605 else
2606 x = fold (x);
2607
2608 gcc_assert (TREE_CODE (x) != COND_EXPR
2609 || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
2610 break;
2611
2612 case UNARY_PLUS_EXPR:
2613 op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2614 if (op0 == error_mark_node)
2615 x = error_mark_node;
2616 else
2617 x = fold_convert (TREE_TYPE (x), op0);
2618 break;
2619
2620 case POSTDECREMENT_EXPR:
2621 case POSTINCREMENT_EXPR:
2622 case INIT_EXPR:
2623 case PREDECREMENT_EXPR:
2624 case PREINCREMENT_EXPR:
2625 case COMPOUND_EXPR:
2626 case MODIFY_EXPR:
2627 rval_ops = false;
2628 /* FALLTHRU */
2629 case POINTER_PLUS_EXPR:
2630 case PLUS_EXPR:
2631 case POINTER_DIFF_EXPR:
2632 case MINUS_EXPR:
2633 case MULT_EXPR:
2634 case TRUNC_DIV_EXPR:
2635 case CEIL_DIV_EXPR:
2636 case FLOOR_DIV_EXPR:
2637 case ROUND_DIV_EXPR:
2638 case TRUNC_MOD_EXPR:
2639 case CEIL_MOD_EXPR:
2640 case ROUND_MOD_EXPR:
2641 case RDIV_EXPR:
2642 case EXACT_DIV_EXPR:
2643 case MIN_EXPR:
2644 case MAX_EXPR:
2645 case LSHIFT_EXPR:
2646 case RSHIFT_EXPR:
2647 case LROTATE_EXPR:
2648 case RROTATE_EXPR:
2649 case BIT_AND_EXPR:
2650 case BIT_IOR_EXPR:
2651 case BIT_XOR_EXPR:
2652 case TRUTH_AND_EXPR:
2653 case TRUTH_ANDIF_EXPR:
2654 case TRUTH_OR_EXPR:
2655 case TRUTH_ORIF_EXPR:
2656 case TRUTH_XOR_EXPR:
2657 case LT_EXPR: case LE_EXPR:
2658 case GT_EXPR: case GE_EXPR:
2659 case EQ_EXPR: case NE_EXPR:
2660 case UNORDERED_EXPR: case ORDERED_EXPR:
2661 case UNLT_EXPR: case UNLE_EXPR:
2662 case UNGT_EXPR: case UNGE_EXPR:
2663 case UNEQ_EXPR: case LTGT_EXPR:
2664 case RANGE_EXPR: case COMPLEX_EXPR:
2665
2666 loc = EXPR_LOCATION (x);
2667 op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
2668 op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));
2669
2670       /* decltype(nullptr) has only one value, so optimize away all comparisons
2671 	 with that type right away; keeping them in the IL causes trouble for
2672 	 various optimizations.  */
2673 if (COMPARISON_CLASS_P (org_x)
2674 && TREE_CODE (TREE_TYPE (op0)) == NULLPTR_TYPE
2675 && TREE_CODE (TREE_TYPE (op1)) == NULLPTR_TYPE)
2676 {
2677 switch (code)
2678 {
2679 case EQ_EXPR:
2680 x = constant_boolean_node (true, TREE_TYPE (x));
2681 break;
2682 case NE_EXPR:
2683 x = constant_boolean_node (false, TREE_TYPE (x));
2684 break;
2685 default:
2686 gcc_unreachable ();
2687 }
2688 return omit_two_operands_loc (loc, TREE_TYPE (x), x,
2689 op0, op1);
2690 }
2691
2692 if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
2693 {
2694 if (op0 == error_mark_node || op1 == error_mark_node)
2695 x = error_mark_node;
2696 else
2697 x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
2698 }
2699 else
2700 x = fold (x);
2701
2702       /* This is only needed for -Wnonnull-compare and only if
2703 	 warning_suppressed_p (org_x, OPT_Wnonnull_compare), but to avoid
2704 	 that option affecting code generation, we do it always.  */
2705 if (COMPARISON_CLASS_P (org_x))
2706 {
2707 if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
2708 ;
2709 else if (COMPARISON_CLASS_P (x))
2710 {
2711 if (warn_nonnull_compare
2712 && warning_suppressed_p (org_x, OPT_Wnonnull_compare))
2713 suppress_warning (x, OPT_Wnonnull_compare);
2714 }
2715 	  /* Otherwise give up on optimizing these; let GIMPLE folders
2716 	     optimize them later on.  */
2717 else if (op0 != TREE_OPERAND (org_x, 0)
2718 || op1 != TREE_OPERAND (org_x, 1))
2719 {
2720 x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
2721 if (warn_nonnull_compare
2722 && warning_suppressed_p (org_x, OPT_Wnonnull_compare))
2723 suppress_warning (x, OPT_Wnonnull_compare);
2724 }
2725 else
2726 x = org_x;
2727 }
2728
2729 break;
2730
2731 case VEC_COND_EXPR:
2732 case COND_EXPR:
2733 loc = EXPR_LOCATION (x);
2734 op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
2735 op1 = cp_fold (TREE_OPERAND (x, 1));
2736 op2 = cp_fold (TREE_OPERAND (x, 2));
2737
2738 if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
2739 {
2740 warning_sentinel s (warn_int_in_bool_context);
2741 if (!VOID_TYPE_P (TREE_TYPE (op1)))
2742 op1 = cp_truthvalue_conversion (op1, tf_warning_or_error);
2743 if (!VOID_TYPE_P (TREE_TYPE (op2)))
2744 op2 = cp_truthvalue_conversion (op2, tf_warning_or_error);
2745 }
2746 else if (VOID_TYPE_P (TREE_TYPE (x)))
2747 {
2748 if (TREE_CODE (op0) == INTEGER_CST)
2749 {
2750 	      /* If the condition is constant, fold can fold away
2751 		 the COND_EXPR.  Some statement-level uses of COND_EXPR
2752 		 have one of the branches NULL, so avoid a folding crash.  */
2753 if (!op1)
2754 op1 = build_empty_stmt (loc);
2755 if (!op2)
2756 op2 = build_empty_stmt (loc);
2757 }
2758 else
2759 {
2760 /* Otherwise, don't bother folding a void condition, since
2761 it can't produce a constant value. */
2762 if (op0 != TREE_OPERAND (x, 0)
2763 || op1 != TREE_OPERAND (x, 1)
2764 || op2 != TREE_OPERAND (x, 2))
2765 x = build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2766 break;
2767 }
2768 }
2769
2770 if (op0 != TREE_OPERAND (x, 0)
2771 || op1 != TREE_OPERAND (x, 1)
2772 || op2 != TREE_OPERAND (x, 2))
2773 {
2774 if (op0 == error_mark_node
2775 || op1 == error_mark_node
2776 || op2 == error_mark_node)
2777 x = error_mark_node;
2778 else
2779 x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
2780 }
2781 else
2782 x = fold (x);
2783
2784 /* A COND_EXPR might have incompatible types in branches if one or both
2785 arms are bitfields. If folding exposed such a branch, fix it up. */
2786 if (TREE_CODE (x) != code
2787 && x != error_mark_node
2788 && !useless_type_conversion_p (TREE_TYPE (org_x), TREE_TYPE (x)))
2789 x = fold_convert (TREE_TYPE (org_x), x);
2790
2791 break;
2792
2793 case CALL_EXPR:
2794 {
2795 tree callee = get_callee_fndecl (x);
2796
2797 /* "Inline" calls to std::move/forward and other cast-like functions
2798 by simply folding them into a corresponding cast to their return
2799 type. This is cheaper than relying on the middle end to do so, and
2800 also means we avoid generating useless debug info for them at all.
2801
2802 At this point the argument has already been converted into a
2803 reference, so it suffices to use a NOP_EXPR to express the
2804 cast. */
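	/* E.g. a call 'std::move (x)' where X has type T folds to the
	   equivalent of 'static_cast<T&&> (x)': a NOP_EXPR of the call's
	   return type around the argument (illustrative source; the actual
	   transformation is on trees).  */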
2805 if ((OPTION_SET_P (flag_fold_simple_inlines)
2806 ? flag_fold_simple_inlines
2807 : !flag_no_inline)
2808 && call_expr_nargs (x) == 1
2809 && decl_in_std_namespace_p (callee)
2810 && DECL_NAME (callee) != NULL_TREE
2811 && (id_equal (DECL_NAME (callee), "move")
2812 || id_equal (DECL_NAME (callee), "forward")
2813 || id_equal (DECL_NAME (callee), "addressof")
2814 /* This addressof equivalent is used heavily in libstdc++. */
2815 || id_equal (DECL_NAME (callee), "__addressof")
2816 || id_equal (DECL_NAME (callee), "as_const")))
2817 {
2818 r = CALL_EXPR_ARG (x, 0);
2819 /* Check that the return and argument types are sane before
2820 folding. */
2821 if (INDIRECT_TYPE_P (TREE_TYPE (x))
2822 && INDIRECT_TYPE_P (TREE_TYPE (r)))
2823 {
2824 if (!same_type_p (TREE_TYPE (x), TREE_TYPE (r)))
2825 r = build_nop (TREE_TYPE (x), r);
2826 x = cp_fold (r);
2827 break;
2828 }
2829 }
2830
2831 int sv = optimize, nw = sv;
2832
2833 /* Some built-in function calls will be evaluated at compile-time in
2834 fold (). Set optimize to 1 when folding __builtin_constant_p inside
2835 a constexpr function so that fold_builtin_1 doesn't fold it to 0. */
2836 if (callee && fndecl_built_in_p (callee) && !optimize
2837 && DECL_IS_BUILTIN_CONSTANT_P (callee)
2838 && current_function_decl
2839 && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
2840 nw = 1;
2841
2842 if (callee && fndecl_built_in_p (callee, BUILT_IN_FRONTEND))
2843 {
2844 switch (DECL_FE_FUNCTION_CODE (callee))
2845 {
2846 /* Defer folding __builtin_is_constant_evaluated. */
2847 case CP_BUILT_IN_IS_CONSTANT_EVALUATED:
2848 break;
2849 case CP_BUILT_IN_SOURCE_LOCATION:
2850 x = fold_builtin_source_location (EXPR_LOCATION (x));
2851 break;
2852 case CP_BUILT_IN_IS_CORRESPONDING_MEMBER:
2853 x = fold_builtin_is_corresponding_member
2854 (EXPR_LOCATION (x), call_expr_nargs (x),
2855 &CALL_EXPR_ARG (x, 0));
2856 break;
2857 case CP_BUILT_IN_IS_POINTER_INTERCONVERTIBLE_WITH_CLASS:
2858 x = fold_builtin_is_pointer_inverconvertible_with_class
2859 (EXPR_LOCATION (x), call_expr_nargs (x),
2860 &CALL_EXPR_ARG (x, 0));
2861 break;
2862 default:
2863 break;
2864 }
2865 break;
2866 }
2867
2868 if (callee
2869 && fndecl_built_in_p (callee, CP_BUILT_IN_SOURCE_LOCATION,
2870 BUILT_IN_FRONTEND))
2871 {
2872 x = fold_builtin_source_location (EXPR_LOCATION (x));
2873 break;
2874 }
2875
2876 bool changed = false;
2877 int m = call_expr_nargs (x);
2878 for (int i = 0; i < m; i++)
2879 {
2880 r = cp_fold (CALL_EXPR_ARG (x, i));
2881 if (r != CALL_EXPR_ARG (x, i))
2882 {
2883 if (r == error_mark_node)
2884 {
2885 x = error_mark_node;
2886 break;
2887 }
2888 if (!changed)
2889 x = copy_node (x);
2890 CALL_EXPR_ARG (x, i) = r;
2891 changed = true;
2892 }
2893 }
2894 if (x == error_mark_node)
2895 break;
2896
2897 optimize = nw;
2898 r = fold (x);
2899 optimize = sv;
2900
2901 if (TREE_CODE (r) != CALL_EXPR)
2902 {
2903 x = cp_fold (r);
2904 break;
2905 }
2906
2907 optimize = nw;
2908
2909 /* Invoke maybe_constant_value for functions declared
2910 constexpr and not called with AGGR_INIT_EXPRs.
2911 TODO:
2912 Do constexpr expansion of expressions where the call itself is not
2913 constant, but the call followed by an INDIRECT_REF is. */
2914 if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
2915 && !flag_no_inline)
2916 r = maybe_constant_value (x);
2917 optimize = sv;
2918
2919 if (TREE_CODE (r) != CALL_EXPR)
2920 {
2921 if (DECL_CONSTRUCTOR_P (callee))
2922 {
2923 loc = EXPR_LOCATION (x);
2924 tree s = build_fold_indirect_ref_loc (loc,
2925 CALL_EXPR_ARG (x, 0));
2926 r = build2_loc (loc, INIT_EXPR, TREE_TYPE (s), s, r);
2927 }
2928 x = r;
2929 break;
2930 }
2931
2932 break;
2933 }
2934
2935 case CONSTRUCTOR:
2936 {
2937 unsigned i;
2938 constructor_elt *p;
2939 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
2940 vec<constructor_elt, va_gc> *nelts = NULL;
2941 FOR_EACH_VEC_SAFE_ELT (elts, i, p)
2942 {
2943 tree op = cp_fold (p->value);
2944 if (op != p->value)
2945 {
2946 if (op == error_mark_node)
2947 {
2948 x = error_mark_node;
2949 vec_free (nelts);
2950 break;
2951 }
2952 if (nelts == NULL)
2953 nelts = elts->copy ();
2954 (*nelts)[i].value = op;
2955 }
2956 }
2957 if (nelts)
2958 {
2959 x = build_constructor (TREE_TYPE (x), nelts);
2960 CONSTRUCTOR_PLACEHOLDER_BOUNDARY (x)
2961 = CONSTRUCTOR_PLACEHOLDER_BOUNDARY (org_x);
2962 }
2963 if (VECTOR_TYPE_P (TREE_TYPE (x)))
2964 x = fold (x);
2965 break;
2966 }
2967 case TREE_VEC:
2968 {
2969 bool changed = false;
2970 int n = TREE_VEC_LENGTH (x);
2971
2972 for (int i = 0; i < n; i++)
2973 {
2974 tree op = cp_fold (TREE_VEC_ELT (x, i));
2975 if (op != TREE_VEC_ELT (x, i))
2976 {
2977 if (!changed)
2978 x = copy_node (x);
2979 TREE_VEC_ELT (x, i) = op;
2980 changed = true;
2981 }
2982 }
2983 }
2984
2985 break;
2986
2987 case ARRAY_REF:
2988 case ARRAY_RANGE_REF:
2989
2990 loc = EXPR_LOCATION (x);
2991 op0 = cp_fold (TREE_OPERAND (x, 0));
2992 op1 = cp_fold (TREE_OPERAND (x, 1));
2993 op2 = cp_fold (TREE_OPERAND (x, 2));
2994 op3 = cp_fold (TREE_OPERAND (x, 3));
2995
2996 if (op0 != TREE_OPERAND (x, 0)
2997 || op1 != TREE_OPERAND (x, 1)
2998 || op2 != TREE_OPERAND (x, 2)
2999 || op3 != TREE_OPERAND (x, 3))
3000 {
3001 if (op0 == error_mark_node
3002 || op1 == error_mark_node
3003 || op2 == error_mark_node
3004 || op3 == error_mark_node)
3005 x = error_mark_node;
3006 else
3007 {
3008 x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
3009 TREE_READONLY (x) = TREE_READONLY (org_x);
3010 TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
3011 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
3012 }
3013 }
3014
3015 x = fold (x);
3016 break;
3017
3018 case SAVE_EXPR:
3019 /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
3020 folding, evaluates to an invariant. In that case no need to wrap
3021 this folded tree with a SAVE_EXPR. */
3022 r = cp_fold (TREE_OPERAND (x, 0));
3023 if (tree_invariant_p (r))
3024 x = r;
3025 break;
3026
3027 case REQUIRES_EXPR:
3028 x = evaluate_requires_expr (x);
3029 break;
3030
3031 default:
3032 return org_x;
3033 }
3034
3035 if (EXPR_P (x) && TREE_CODE (x) == code)
3036 {
3037 TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
3038 copy_warning (x, org_x);
3039 }
3040
3041 if (!c.evaluation_restricted_p ())
3042 {
3043 fold_cache->put (org_x, x);
3044       /* Make sure we don't try to fold an already folded result again.  */
3045 if (x != org_x)
3046 fold_cache->put (x, x);
3047 }
3048
3049 return x;
3050 }
3051
3052 /* Look up "hot", "cold", "likely" or "unlikely" in attribute list LIST.  */
3053
3054 tree
lookup_hotness_attribute(tree list)3055 lookup_hotness_attribute (tree list)
3056 {
3057 for (; list; list = TREE_CHAIN (list))
3058 {
3059 tree name = get_attribute_name (list);
3060 if (is_attribute_p ("hot", name)
3061 || is_attribute_p ("cold", name)
3062 || is_attribute_p ("likely", name)
3063 || is_attribute_p ("unlikely", name))
3064 break;
3065 }
3066 return list;
3067 }
3068
3069 /* Remove the "hot", "cold", "likely" and "unlikely" attributes from LIST.  */
3070
3071 static tree
3072 remove_hotness_attribute (tree list)
3073 {
3074 list = remove_attribute ("hot", list);
3075 list = remove_attribute ("cold", list);
3076 list = remove_attribute ("likely", list);
3077 list = remove_attribute ("unlikely", list);
3078 return list;
3079 }
3080
3081 /* If [[likely]] or [[unlikely]] appear on this statement, turn it into a
3082 PREDICT_EXPR. */
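
/* For example, in the (hypothetical) statement 'if (p) [[likely]] f ();'
   the attribute on the guarded statement becomes a
   PREDICT_EXPR <hot label, taken> emitted ahead of that statement, and the
   attribute itself is removed from the list.  */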
3083
3084 tree
3085 process_stmt_hotness_attribute (tree std_attrs, location_t attrs_loc)
3086 {
3087 if (std_attrs == error_mark_node)
3088 return std_attrs;
3089 if (tree attr = lookup_hotness_attribute (std_attrs))
3090 {
3091 tree name = get_attribute_name (attr);
3092 bool hot = (is_attribute_p ("hot", name)
3093 || is_attribute_p ("likely", name));
3094 tree pred = build_predict_expr (hot ? PRED_HOT_LABEL : PRED_COLD_LABEL,
3095 hot ? TAKEN : NOT_TAKEN);
3096 SET_EXPR_LOCATION (pred, attrs_loc);
3097 add_stmt (pred);
3098 if (tree other = lookup_hotness_attribute (TREE_CHAIN (attr)))
3099 warning (OPT_Wattributes, "ignoring attribute %qE after earlier %qE",
3100 get_attribute_name (other), name);
3101 std_attrs = remove_hotness_attribute (std_attrs);
3102 }
3103 return std_attrs;
3104 }
3105
3106 /* Helper of fold_builtin_source_location, return the
3107 std::source_location::__impl type after performing verification
3108 on it. LOC is used for reporting any errors. */
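
/* A conforming libstdc++ definition looks roughly like

     struct source_location::__impl
     {
       const char *_M_file_name;
       const char *_M_function_name;
       unsigned _M_line;
       unsigned _M_column;
     };

   except that any integral type is accepted for the last two members.  */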
3109
3110 static tree
3111 get_source_location_impl_type (location_t loc)
3112 {
3113 tree name = get_identifier ("source_location");
3114 tree decl = lookup_qualified_name (std_node, name);
3115 if (TREE_CODE (decl) != TYPE_DECL)
3116 {
3117 auto_diagnostic_group d;
3118 if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
3119 qualified_name_lookup_error (std_node, name, decl, loc);
3120 else
3121 error_at (loc, "%qD is not a type", decl);
3122 return error_mark_node;
3123 }
3124 name = get_identifier ("__impl");
3125 tree type = TREE_TYPE (decl);
3126 decl = lookup_qualified_name (type, name);
3127 if (TREE_CODE (decl) != TYPE_DECL)
3128 {
3129 auto_diagnostic_group d;
3130 if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
3131 qualified_name_lookup_error (type, name, decl, loc);
3132 else
3133 error_at (loc, "%qD is not a type", decl);
3134 return error_mark_node;
3135 }
3136 type = TREE_TYPE (decl);
3137 if (TREE_CODE (type) != RECORD_TYPE)
3138 {
3139 error_at (loc, "%qD is not a class type", decl);
3140 return error_mark_node;
3141 }
3142
3143 int cnt = 0;
3144 for (tree field = TYPE_FIELDS (type);
3145 (field = next_initializable_field (field)) != NULL_TREE;
3146 field = DECL_CHAIN (field))
3147 {
3148 if (DECL_NAME (field) != NULL_TREE)
3149 {
3150 const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
3151 if (strcmp (n, "_M_file_name") == 0
3152 || strcmp (n, "_M_function_name") == 0)
3153 {
3154 if (TREE_TYPE (field) != const_string_type_node)
3155 {
3156 error_at (loc, "%qD does not have %<const char *%> type",
3157 field);
3158 return error_mark_node;
3159 }
3160 cnt++;
3161 continue;
3162 }
3163 else if (strcmp (n, "_M_line") == 0 || strcmp (n, "_M_column") == 0)
3164 {
3165 if (TREE_CODE (TREE_TYPE (field)) != INTEGER_TYPE)
3166 {
3167 error_at (loc, "%qD does not have integral type", field);
3168 return error_mark_node;
3169 }
3170 cnt++;
3171 continue;
3172 }
3173 }
3174 cnt = 0;
3175 break;
3176 }
3177 if (cnt != 4)
3178 {
3179 error_at (loc, "%<std::source_location::__impl%> does not contain only "
3180 "non-static data members %<_M_file_name%>, "
3181 "%<_M_function_name%>, %<_M_line%> and %<_M_column%>");
3182 return error_mark_node;
3183 }
3184 return build_qualified_type (type, TYPE_QUAL_CONST);
3185 }
3186
3187 /* Type for source_location_table hash_set. */
3188 struct GTY((for_user)) source_location_table_entry {
3189 location_t loc;
3190 unsigned uid;
3191 tree var;
3192 };
3193
3194 /* Traits class for the source_location_table hash table below.  */
3195
3196 struct source_location_table_entry_hash
3197 : ggc_remove <source_location_table_entry>
3198 {
3199 typedef source_location_table_entry value_type;
3200 typedef source_location_table_entry compare_type;
3201
3202 static hashval_t
3203   hash (const source_location_table_entry &ref)
3204 {
3205 inchash::hash hstate (0);
3206 hstate.add_int (ref.loc);
3207 hstate.add_int (ref.uid);
3208 return hstate.end ();
3209 }
3210
3211 static bool
3212   equal (const source_location_table_entry &ref1,
3213 const source_location_table_entry &ref2)
3214 {
3215 return ref1.loc == ref2.loc && ref1.uid == ref2.uid;
3216 }
3217
3218 static void
3219   mark_deleted (source_location_table_entry &ref)
3220 {
3221 ref.loc = UNKNOWN_LOCATION;
3222 ref.uid = -1U;
3223 ref.var = NULL_TREE;
3224 }
3225
3226 static const bool empty_zero_p = true;
3227
3228 static void
3229   mark_empty (source_location_table_entry &ref)
3230 {
3231 ref.loc = UNKNOWN_LOCATION;
3232 ref.uid = 0;
3233 ref.var = NULL_TREE;
3234 }
3235
3236 static bool
3237   is_deleted (const source_location_table_entry &ref)
3238 {
3239 return (ref.loc == UNKNOWN_LOCATION
3240 && ref.uid == -1U
3241 && ref.var == NULL_TREE);
3242 }
3243
3244 static bool
3245   is_empty (const source_location_table_entry &ref)
3246 {
3247 return (ref.loc == UNKNOWN_LOCATION
3248 && ref.uid == 0
3249 && ref.var == NULL_TREE);
3250 }
3251
3252 static void
3253   pch_nx (source_location_table_entry &p)
3254 {
3255 extern void gt_pch_nx (source_location_table_entry &);
3256 gt_pch_nx (p);
3257 }
3258
3259 static void
3260   pch_nx (source_location_table_entry &p, gt_pointer_operator op, void *cookie)
3261 {
3262 extern void gt_pch_nx (source_location_table_entry *, gt_pointer_operator,
3263 void *);
3264 gt_pch_nx (&p, op, cookie);
3265 }
3266 };
3267
3268 static GTY(()) hash_table <source_location_table_entry_hash>
3269 *source_location_table;
3270 static GTY(()) unsigned int source_location_id;
3271
3272 /* Fold __builtin_source_location () call. LOC is the location
3273 of the call. */
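
/* Each distinct (location, function) pair gets a single static constant
   __impl variable, named with the "Lsrc_loc" internal label; e.g. a call
   at line 42, column 7 of foo.cc inside f () folds to the address of a
   variable initialized with { "foo.cc", "f", 42, 7 } (file and function
   names here are illustrative).  */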
3274
3275 tree
3276 fold_builtin_source_location (location_t loc)
3277 {
3278 if (source_location_impl == NULL_TREE)
3279 {
3280 auto_diagnostic_group d;
3281 source_location_impl = get_source_location_impl_type (loc);
3282 if (source_location_impl == error_mark_node)
3283 inform (loc, "evaluating %qs", "__builtin_source_location");
3284 }
3285 if (source_location_impl == error_mark_node)
3286 return build_zero_cst (const_ptr_type_node);
3287 if (source_location_table == NULL)
3288 source_location_table
3289 = hash_table <source_location_table_entry_hash>::create_ggc (64);
3290 const line_map_ordinary *map;
3291 source_location_table_entry entry;
3292 entry.loc
3293 = linemap_resolve_location (line_table, loc, LRK_MACRO_EXPANSION_POINT,
3294 &map);
3295 entry.uid = current_function_decl ? DECL_UID (current_function_decl) : -1;
3296 entry.var = error_mark_node;
3297 source_location_table_entry *entryp
3298 = source_location_table->find_slot (entry, INSERT);
3299 tree var;
3300 if (entryp->var)
3301 var = entryp->var;
3302 else
3303 {
3304 char tmp_name[32];
3305 ASM_GENERATE_INTERNAL_LABEL (tmp_name, "Lsrc_loc", source_location_id++);
3306 var = build_decl (loc, VAR_DECL, get_identifier (tmp_name),
3307 source_location_impl);
3308 TREE_STATIC (var) = 1;
3309 TREE_PUBLIC (var) = 0;
3310 DECL_ARTIFICIAL (var) = 1;
3311 DECL_IGNORED_P (var) = 1;
3312 DECL_EXTERNAL (var) = 0;
3313 DECL_DECLARED_CONSTEXPR_P (var) = 1;
3314 DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (var) = 1;
3315 layout_decl (var, 0);
3316
3317 vec<constructor_elt, va_gc> *v = NULL;
3318 vec_alloc (v, 4);
3319 for (tree field = TYPE_FIELDS (source_location_impl);
3320 (field = next_initializable_field (field)) != NULL_TREE;
3321 field = DECL_CHAIN (field))
3322 {
3323 const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
3324 tree val = NULL_TREE;
3325 if (strcmp (n, "_M_file_name") == 0)
3326 {
3327 if (const char *fname = LOCATION_FILE (loc))
3328 {
3329 fname = remap_macro_filename (fname);
3330 val = build_string_literal (strlen (fname) + 1, fname);
3331 }
3332 else
3333 val = build_string_literal (1, "");
3334 }
3335 else if (strcmp (n, "_M_function_name") == 0)
3336 {
3337 const char *name = "";
3338
3339 if (current_function_decl)
3340 name = cxx_printable_name (current_function_decl, 2);
3341
3342 val = build_string_literal (strlen (name) + 1, name);
3343 }
3344 else if (strcmp (n, "_M_line") == 0)
3345 val = build_int_cst (TREE_TYPE (field), LOCATION_LINE (loc));
3346 else if (strcmp (n, "_M_column") == 0)
3347 val = build_int_cst (TREE_TYPE (field), LOCATION_COLUMN (loc));
3348 else
3349 gcc_unreachable ();
3350 CONSTRUCTOR_APPEND_ELT (v, field, val);
3351 }
3352
3353 tree ctor = build_constructor (source_location_impl, v);
3354 TREE_CONSTANT (ctor) = 1;
3355 TREE_STATIC (ctor) = 1;
3356 DECL_INITIAL (var) = ctor;
3357 varpool_node::finalize_decl (var);
3358 *entryp = entry;
3359 entryp->var = var;
3360 }
3361
3362 return build_fold_addr_expr_with_type_loc (loc, var, const_ptr_type_node);
3363 }
3364
3365 #include "gt-cp-cp-gimplify.h"
3366