/* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
   tree representation into the GIMPLE form.
   Copyright (C) 2002-2022 Free Software Foundation, Inc.
   Major work done by Sebastian Pop <s.pop@laposte.net>,
   Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "tm_p.h"
#include "gimple.h"
#include "gimple-predict.h"
#include "tree-pass.h"		/* FIXME: only for PROP_gimple_any */
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "calls.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-cfg.h"
#include "tree-ssa.h"
#include "tree-hash-traits.h"
#include "omp-general.h"
#include "omp-low.h"
#include "gimple-low.h"
#include "gomp-constants.h"
#include "splay-tree.h"
#include "gimple-walk.h"
#include "langhooks-def.h"	/* FIXME: for lhd_set_decl_assembler_name */
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "dbgcnt.h"
#include "omp-offload.h"
#include "context.h"
#include "tree-nested.h"

/* Hash set of poisoned variables in a bind expr.  */
static hash_set<tree> *asan_poisoned_variables = NULL;

enum gimplify_omp_var_data
{
  GOVD_SEEN = 0x000001,
  GOVD_EXPLICIT = 0x000002,
  GOVD_SHARED = 0x000004,
  GOVD_PRIVATE = 0x000008,
  GOVD_FIRSTPRIVATE = 0x000010,
  GOVD_LASTPRIVATE = 0x000020,
  GOVD_REDUCTION = 0x000040,
  GOVD_LOCAL = 0x000080,
  GOVD_MAP = 0x000100,
  GOVD_DEBUG_PRIVATE = 0x000200,
  GOVD_PRIVATE_OUTER_REF = 0x000400,
  GOVD_LINEAR = 0x000800,
  GOVD_ALIGNED = 0x001000,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 0x002000,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 0x004000,

  GOVD_MAP_0LEN_ARRAY = 0x008000,

  /* Flag for GOVD_MAP, if it is an "always,to" or "always,tofrom"
     mapping.  */
  GOVD_MAP_ALWAYS_TO = 0x010000,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 0x020000,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 0x040000,

  /* Flag for GOVD_MAP: must be present already.  */
  GOVD_MAP_FORCE_PRESENT = 0x080000,

  /* Flag for GOVD_MAP: only allocate.  */
  GOVD_MAP_ALLOC_ONLY = 0x100000,

  /* Flag for GOVD_MAP: only copy back.  */
  GOVD_MAP_FROM_ONLY = 0x200000,

  GOVD_NONTEMPORAL = 0x400000,

  /* Flag for GOVD_LASTPRIVATE: conditional modifier.  */
  GOVD_LASTPRIVATE_CONDITIONAL = 0x800000,

  GOVD_CONDTEMP = 0x1000000,

  /* Flag for GOVD_REDUCTION: inscan seen in {in,ex}clusive clause.  */
  GOVD_REDUCTION_INSCAN = 0x2000000,

  /* Flag for GOVD_MAP: (struct) vars that have pointer attachments for
     fields.  */
  GOVD_MAP_HAS_ATTACHMENTS = 0x4000000,

  /* Flag for GOVD_FIRSTPRIVATE: OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT.  */
  GOVD_FIRSTPRIVATE_IMPLICIT = 0x8000000,

  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};
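
/* These flags are combined bitwise in the per-variable entries of an
   OpenMP context's splay tree.  For example, an explicit "map(to: x)"
   clause would typically be recorded as GOVD_MAP | GOVD_MAP_TO_ONLY
   | GOVD_EXPLICIT (an illustration only; the precise combinations are
   computed by the clause-scanning code later in this file).  */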


enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_TASKGROUP = 0x01,
  ORT_SIMD = 0x04,

  ORT_PARALLEL = 0x08,
  ORT_COMBINED_PARALLEL = ORT_PARALLEL | 1,

  ORT_TASK = 0x10,
  ORT_UNTIED_TASK = ORT_TASK | 1,
  ORT_TASKLOOP = ORT_TASK | 2,
  ORT_UNTIED_TASKLOOP = ORT_UNTIED_TASK | 2,

  ORT_TEAMS = 0x20,
  ORT_COMBINED_TEAMS = ORT_TEAMS | 1,
  ORT_HOST_TEAMS = ORT_TEAMS | 2,
  ORT_COMBINED_HOST_TEAMS = ORT_COMBINED_TEAMS | 2,

  /* Data region.  */
  ORT_TARGET_DATA = 0x40,

  /* Data region with offloading.  */
  ORT_TARGET = 0x80,
  ORT_COMBINED_TARGET = ORT_TARGET | 1,
  ORT_IMPLICIT_TARGET = ORT_TARGET | 2,

  /* OpenACC variants.  */
  ORT_ACC = 0x100,  /* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,  /* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,  /* Parallel construct.  */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 2,  /* Kernels construct.  */
  ORT_ACC_SERIAL = ORT_ACC | ORT_TARGET | 4,  /* Serial construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 2,  /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x200
};

/* Gimplify hashtable helper.  */

struct gimplify_hasher : free_ptr_hash <elt_t>
{
  static inline hashval_t hash (const elt_t *);
  static inline bool equal (const elt_t *, const elt_t *);
};

struct gimplify_ctx
{
  struct gimplify_ctx *prev_context;

  vec<gbind *> bind_expr_stack;
  tree temps;
  gimple_seq conditional_cleanups;
  tree exit_label;
  tree return_temp;

  vec<tree> case_labels;
  hash_set<tree> *live_switch_vars;
  /* The formal temporary table.  Should this be persistent?  */
  hash_table<gimplify_hasher> *temp_htab;

  int conditions;
  unsigned into_ssa : 1;
  unsigned allow_rhs_cond_expr : 1;
  unsigned in_cleanup_point_expr : 1;
  unsigned keep_stack : 1;
  unsigned save_stack : 1;
  unsigned in_switch_expr : 1;
};

enum gimplify_defaultmap_kind
{
  GDMK_SCALAR,
  GDMK_SCALAR_TARGET, /* With Fortran's target attribute, implicit mapping
			 only.  */
  GDMK_AGGREGATE,
  GDMK_ALLOCATABLE,
  GDMK_POINTER
};

struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;
  splay_tree variables;
  hash_set<tree> *privatized_types;
  tree clauses;
  /* Iteration variables in an OMP_FOR.  */
  vec<tree> loop_iter_var;
  location_t location;
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
  enum tree_code code;
  bool combined_loop;
  bool distribute;
  bool target_firstprivatize_array_bases;
  bool add_safelen1;
  bool order_concurrent;
  bool has_depend;
  bool in_for_exprs;
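  /* One entry per gimplify_defaultmap_kind (GDMK_*); see the
     initialization in new_omp_context below.  */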
  int defaultmap[5];
};

static struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;
static bool in_omp_construct;

/* Forward declarations.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
static hash_map<tree, tree> *oacc_declare_returns;
static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
					   bool (*) (tree), fallback_t, bool);
static void prepare_gimple_addressable (tree *, gimple_seq *);

/* Shorter alias name for the above function for use in gimplify.cc
   only.  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}

/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  This function is
   similar to gimple_seq_add_seq, but does not scan the operands.
   During gimplification, we need to manipulate statement sequences
   before the def/use vectors have been constructed.  */

static void
gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  si = gsi_last (*dst_p);
  gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
}


/* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
   and popping gimplify contexts.  */

static struct gimplify_ctx *ctx_pool = NULL;

/* Return a gimplify context struct from the pool.  */

static inline struct gimplify_ctx *
ctx_alloc (void)
{
  struct gimplify_ctx * c = ctx_pool;

  if (c)
    ctx_pool = c->prev_context;
  else
    c = XNEW (struct gimplify_ctx);

  memset (c, '\0', sizeof (*c));
  return c;
}

/* Put gimplify context C back into the pool.  */

static inline void
ctx_free (struct gimplify_ctx *c)
{
  c->prev_context = ctx_pool;
  ctx_pool = c;
}

/* Free allocated ctx stack memory.  */

void
free_gimplify_stack (void)
{
  struct gimplify_ctx *c;

  while ((c = ctx_pool))
    {
      ctx_pool = c->prev_context;
      free (c);
    }
}


/* Set up a context for the gimplifier.  */

void
push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
{
  struct gimplify_ctx *c = ctx_alloc ();

  c->prev_context = gimplify_ctxp;
  gimplify_ctxp = c;
  gimplify_ctxp->into_ssa = in_ssa;
  gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
}

/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple *body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  gcc_assert (c
	      && (!c->bind_expr_stack.exists ()
		  || c->bind_expr_stack.is_empty ()));
  c->bind_expr_stack.release ();
  gimplify_ctxp = c->prev_context;

  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  delete c->temp_htab;
  c->temp_htab = NULL;
  ctx_free (c);
}

/* Push a GIMPLE_BIND tuple onto the stack of bindings.  */

static void
gimple_push_bind_expr (gbind *bind_stmt)
{
  gimplify_ctxp->bind_expr_stack.reserve (8);
  gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
}

/* Pop the first element off the stack of bindings.  */

static void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}

/* Return the first element of the stack of bindings.  */

gbind *
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}

/* Return the stack of bindings created during gimplification.  */

vec<gbind *>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}

/* Return true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}

/* Note that we've entered a COND_EXPR.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}

/* Note that we've left a COND_EXPR.  If we're back at unconditional scope
   now, add any conditional cleanups we've seen to the prequeue.  */

static void
gimple_pop_condition (gimple_seq *pre_p)
{
  int conds = --(gimplify_ctxp->conditions);

  gcc_assert (conds >= 0);
  if (conds == 0)
    {
      gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
      gimplify_ctxp->conditional_cleanups = NULL;
    }
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* Create a new omp construct that deals with variable remapping.  */

static struct gimplify_omp_ctx *
new_omp_context (enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *c;

  c = XCNEW (struct gimplify_omp_ctx);
  c->outer_context = gimplify_omp_ctxp;
  c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
  c->privatized_types = new hash_set<tree>;
  c->location = input_location;
  c->region_type = region_type;
  if ((region_type & ORT_TASK) == 0)
    c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  else
    c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
  c->defaultmap[GDMK_SCALAR] = GOVD_MAP;
  c->defaultmap[GDMK_SCALAR_TARGET] = GOVD_MAP;
  c->defaultmap[GDMK_AGGREGATE] = GOVD_MAP;
  c->defaultmap[GDMK_ALLOCATABLE] = GOVD_MAP;
  c->defaultmap[GDMK_POINTER] = GOVD_MAP;

  return c;
}

/* Destroy an omp construct that deals with variable remapping.  */

static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  delete c->privatized_types;
  c->loop_iter_var.release ();
  XDELETE (c);
}

static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);

/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}

/* Gimplify statement T into sequence *SEQ_P, and return the first
   tuple in the sequence of generated tuples for this statement.
   Return NULL if gimplifying T produced no tuples.  */

static gimple *
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  gimple_stmt_iterator last = gsi_last (*seq_p);

  gimplify_and_add (t, seq_p);

  if (!gsi_end_p (last))
    {
      gsi_next (&last);
      return gsi_stmt (last);
    }
  else
    return gimple_seq_first_stmt (*seq_p);
}

/* Returns true iff T is a valid RHS for an assignment to an un-renamed
   LHS, or for a call argument.  */

static bool
is_gimple_mem_rhs (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return is_gimple_val (t) || is_gimple_lvalue (t);
}

/* Return true if T is a CALL_EXPR or an expression that can be
   assigned to a temporary.  Note that this predicate should only be
   used during gimplification.  See the rationale for this in
   gimplify_modify_expr.  */

static bool
is_gimple_reg_rhs_or_call (tree t)
{
  return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
	  || TREE_CODE (t) == CALL_EXPR);
}

/* Return true if T is a valid memory RHS or a CALL_EXPR.  Note that
   this predicate should only be used during gimplification.  See the
   rationale for this in gimplify_modify_expr.  */

static bool
is_gimple_mem_rhs_or_call (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return (is_gimple_val (t)
	    || is_gimple_lvalue (t)
	    || TREE_CLOBBER_P (t)
	    || TREE_CODE (t) == CALL_EXPR);
}

/* Create a temporary with a name derived from VAL.  Subroutine of
   lookup_tmp_var; nobody else should call this function.  */

static inline tree
create_tmp_from_val (tree val)
{
  /* Drop all qualifiers and address-space information from the value type.  */
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
  tree var = create_tmp_var (type, get_name (val));
  return var;
}

/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary.  */

static tree
lookup_tmp_var (tree val, bool is_formal)
{
  tree ret;

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    ret = create_tmp_from_val (val);
  else
    {
      elt_t elt, *elt_p;
      elt_t **slot;

      elt.val = val;
      if (!gimplify_ctxp->temp_htab)
	gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
      slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
      if (*slot == NULL)
	{
	  elt_p = XNEW (elt_t);
	  elt_p->val = val;
	  elt_p->temp = ret = create_tmp_from_val (val);
	  *slot = elt_p;
	}
      else
	{
	  elt_p = *slot;
	  ret = elt_p->temp;
	}
    }

  return ret;
}
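
/* Note that, with optimization enabled, the hash table above hands back the
   same temporary for two structurally equal VAL trees, so formal
   temporaries effectively act as a simple local value-numbering scheme
   during gimplification.  */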

/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
		      bool is_formal, bool allow_ssa)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  if (allow_ssa
      && gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    {
      t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
      if (! gimple_in_ssa_p (cfun))
	{
	  const char *name = get_name (val);
	  if (name)
	    SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
	}
    }
  else
    t = lookup_tmp_var (val, is_formal);

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  ggc_free (mod);

  return t;
}

/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
      change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  return internal_get_tmp_var (val, pre_p, NULL, true, true);
}

/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p,
			 gimple_seq *post_p /* = NULL */,
			 bool allow_ssa /* = true */)
{
  return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa);
}

/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  */

void
declare_vars (tree vars, gimple *gs, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gbind *scope = as_a <gbind *> (gs);

      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
				    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}

/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
   for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
   no such upper bound can be obtained.  */

static void
force_constant_size (tree var)
{
  /* The only attempt we make is by querying the maximum size of objects
     of the variable's type.  */

  HOST_WIDE_INT max_size;

  gcc_assert (VAR_P (var));

  max_size = max_int_size_in_bytes (TREE_TYPE (var));

  gcc_assert (max_size >= 0);

  DECL_SIZE_UNIT (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
  DECL_SIZE (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var_fn (struct function *fn, tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = fn->decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  record_vars_into (tmp, fn->decl);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  int flag = GOVD_LOCAL | GOVD_SEEN;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_TASKGROUP
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    {
	      if (ctx->region_type == ORT_SIMD
		  && TREE_ADDRESSABLE (tmp)
		  && !TREE_STATIC (tmp))
		{
		  if (TREE_CODE (DECL_SIZE_UNIT (tmp)) != INTEGER_CST)
		    ctx->add_safelen1 = true;
		  else if (ctx->in_for_exprs)
		    flag = GOVD_PRIVATE;
		  else
		    flag = GOVD_PRIVATE | GOVD_SEEN;
		  break;
		}
	      ctx = ctx->outer_context;
	    }
	  if (ctx)
	    omp_add_variable (ctx, tmp, flag);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}



/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
   nodes that are referenced more than once in GENERIC functions.  This is
   necessary because gimplification (translation into GIMPLE) is performed
   by modifying tree nodes in-place, so gimplification of a shared node in a
   first context could generate an invalid GIMPLE form in a second context.

   This is achieved with a simple mark/copy/unmark algorithm that walks the
   GENERIC representation top-down, marks nodes with TREE_VISITED the first
   time it encounters them, duplicates them if they already have TREE_VISITED
   set, and finally removes the TREE_VISITED marks it has set.

   The algorithm works only at the function level, i.e. it generates a GENERIC
   representation of a function with no nodes shared within the function when
   passed a GENERIC function (except for nodes that are allowed to be shared).

   At the global level, it is also necessary to unshare tree nodes that are
   referenced in more than one function, for the same aforementioned reason.
   This requires some cooperation from the front-end.  There are 2 strategies:

   1. Manual unsharing.  The front-end needs to call unshare_expr on every
      expression that might end up being shared across functions.

   2. Deep unsharing.  This is an extension of regular unsharing.  Instead
      of calling unshare_expr on expressions that might be shared across
      functions, the front-end pre-marks them with TREE_VISITED.  This will
      ensure that they are unshared on the first reference within functions
      when the regular unsharing algorithm runs.  The counterpart is that
      this algorithm must look deeper than for manual unsharing, which is
      specified by LANG_HOOKS_DEEP_UNSHARING.

   If there are only few specific cases of node sharing across functions, it is
   probably easier for a front-end to unshare the expressions manually.  On the
   contrary, if the expressions generated at the global level are as widespread
   as expressions generated within functions, deep unsharing is very likely the
   way to go.  */
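
/* For instance (an illustration, not part of the original commentary): if a
   front end reuses a single tree node T for the expression "a + b" in two
   different statements, gimplifying the first statement rewrites T in
   place, and the second statement would then see the already-lowered node.
   The walkers below give the second reference its own copy of T instead.  */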

/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      if (data && !((hash_set<tree> *)data)->add (t))
	;
      else
	*walk_subtrees = 0;
    }

  /* Stop at types, decls, and constants, like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
	   || TREE_CODE_CLASS (code) == tcc_declaration
	   || TREE_CODE_CLASS (code) == tcc_constant)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}

/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}

/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
   copy_if_shared_r callback unmodified.  */

void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}

/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
   any nested functions.  */

static void
unshare_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  /* If the language requires deep unsharing, we need a pointer set to make
     sure we don't repeatedly unshare subtrees of unshareable nodes.  */
  hash_set<tree> *visited
    = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;

  copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
  copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
  copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);

  delete visited;

  if (cgn)
    for (cgn = first_nested_function (cgn); cgn;
	 cgn = next_nested_function (cgn))
      unshare_body (cgn->decl);
}

/* Callback for walk_tree to unmark the visited trees rooted at *TP.
   Subtrees are walked until the first unvisited node is encountered.  */

static tree
unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  /* If this node has been visited, unmark it and keep looking.  */
  if (TREE_VISITED (t))
    TREE_VISITED (t) = 0;

  /* Otherwise, don't look any deeper.  */
  else
    *walk_subtrees = 0;

  return NULL_TREE;
}

/* Unmark the visited trees rooted at *TP.  */

static inline void
unmark_visited (tree *tp)
{
  walk_tree (tp, unmark_visited_r, NULL, NULL);
}

/* Likewise, but mark all trees as not visited.  */

static void
unvisit_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);

  unmark_visited (&DECL_SAVED_TREE (fndecl));
  unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
  unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));

  if (cgn)
    for (cgn = first_nested_function (cgn);
	 cgn; cgn = next_nested_function (cgn))
      unvisit_body (cgn->decl);
}

/* Unconditionally make an unshared copy of EXPR.  This is used when using
   stored expressions which span multiple functions, such as BINFO_VTABLE,
   as the normal unsharing process can't tell that they're shared.  */

tree
unshare_expr (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}

/* Worker for unshare_expr_without_location.  */

static tree
prune_expr_location (tree *tp, int *walk_subtrees, void *)
{
  if (EXPR_P (*tp))
    SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
  else
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Similar to unshare_expr but also prune all expression locations
   from EXPR.  */

tree
unshare_expr_without_location (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  if (EXPR_P (expr))
    walk_tree (&expr, prune_expr_location, NULL, NULL);
  return expr;
}

/* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
   one, OR_ELSE otherwise.  The location of a STATEMENT_LIST
   comprising at least one DEBUG_BEGIN_STMT followed by exactly one
   EXPR is the location of the EXPR.  */

static location_t
rexpr_location (tree expr, location_t or_else = UNKNOWN_LOCATION)
{
  if (!expr)
    return or_else;

  if (EXPR_HAS_LOCATION (expr))
    return EXPR_LOCATION (expr);

  if (TREE_CODE (expr) != STATEMENT_LIST)
    return or_else;

  tree_stmt_iterator i = tsi_start (expr);

  bool found = false;
  while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
    {
      found = true;
      tsi_next (&i);
    }

  if (!found || !tsi_one_before_end_p (i))
    return or_else;

  return rexpr_location (tsi_stmt (i), or_else);
}

/* Return TRUE iff EXPR (maybe recursively) has a location; see
   rexpr_location for the potential recursion.  */

static inline bool
rexpr_has_location (tree expr)
{
  return rexpr_location (expr) != UNKNOWN_LOCATION;
}


/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    case TRANSACTION_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TRANSACTION_EXPR_BODY (*p);
	      break;

	    default:
	      /* Assume that any tree upon which voidify_wrapper_expr is
		 directly called is a wrapper, and that its body is op0.  */
	      if (p == &wrapper)
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		  p = &TREE_OPERAND (*p, 0);
		  break;
		}
	      goto out;
	    }
	}

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}
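
/* As an illustration, a GNU statement expression of type int such as
   "({ int i = f (); i; })" reaches this function as a wrapper whose last
   statement is "i"; the loop above finds that statement, rewrites it to
   "retval = i" (or chains it onto TEMP when one is passed in), gives every
   enclosing wrapper void type, and returns the "retval" temporary as the
   value of the whole construct.  */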

/* Prepare calls to builtins to SAVE and RESTORE the stack as well as
   a temporary through which they communicate.  */

static void
build_stack_save_restore (gcall **save, gcall **restore)
{
  tree tmp_var;

  *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
  tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
  gimple_call_set_lhs (*save, tmp_var);

  *restore
    = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
			 1, tmp_var);
}
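
/* The resulting GIMPLE is roughly:

     saved_stack.N = __builtin_stack_save ();
     ... body using variable-sized allocations ...
     __builtin_stack_restore (saved_stack.N);

   with the caller (see gimplify_bind_expr below) placing *SAVE before the
   body and *RESTORE in the cleanup of a GIMPLE_TRY_FINALLY.  */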

/* Generate an IFN_ASAN_MARK call that poisons the shadow memory of the
   DECL variable.  */

static tree
build_asan_poison_call_expr (tree decl)
{
  /* Do not poison variables that have size equal to zero.  */
  tree unit_size = DECL_SIZE_UNIT (decl);
  if (zerop (unit_size))
    return NULL_TREE;

  tree base = build_fold_addr_expr (decl);

  return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
				       void_type_node, 3,
				       build_int_cst (integer_type_node,
						      ASAN_MARK_POISON),
				       base, unit_size);
}

/* Generate an IFN_ASAN_MARK call that poisons or unpoisons, depending on
   the POISON flag, the shadow memory of a DECL variable.  The call is
   placed at the location identified by the IT iterator, where the BEFORE
   flag determines whether the stmt is inserted before or after it.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
		      bool before)
{
  tree unit_size = DECL_SIZE_UNIT (decl);
  tree base = build_fold_addr_expr (decl);

  /* Do not poison variables that have size equal to zero.  */
  if (zerop (unit_size))
    return;

  /* It's necessary to have all stack variables aligned to ASAN granularity
     bytes.  */
  gcc_assert (!hwasan_sanitize_p () || hwasan_sanitize_stack_p ());
  unsigned shadow_granularity
    = hwasan_sanitize_p () ? HWASAN_TAG_GRANULE_SIZE : ASAN_SHADOW_GRANULARITY;
  if (DECL_ALIGN_UNIT (decl) <= shadow_granularity)
    SET_DECL_ALIGN (decl, BITS_PER_UNIT * shadow_granularity);

  HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;

  gimple *g
    = gimple_build_call_internal (IFN_ASAN_MARK, 3,
				  build_int_cst (integer_type_node, flags),
				  base, unit_size);

  if (before)
    gsi_insert_before (it, g, GSI_NEW_STMT);
  else
    gsi_insert_after (it, g, GSI_NEW_STMT);
}

/* Generate an IFN_ASAN_MARK internal call that, depending on the POISON
   flag, either poisons or unpoisons a DECL.  The created statement is
   appended to the SEQ_P gimple sequence.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
{
  gimple_stmt_iterator it = gsi_last (*seq_p);
  bool before = false;

  if (gsi_end_p (it))
    before = true;

  asan_poison_variable (decl, poison, &it, before);
}

/* A qsort comparison routine sorting VAR_DECLs A and B by DECL_UID.  */

static int
sort_by_decl_uid (const void *a, const void *b)
{
  const tree *t1 = (const tree *)a;
  const tree *t2 = (const tree *)b;

  int uid1 = DECL_UID (*t1);
  int uid2 = DECL_UID (*t2);

  if (uid1 < uid2)
    return -1;
  else if (uid1 > uid2)
    return 1;
  else
    return 0;
}

/* Generate IFN_ASAN_MARK internal calls for all VARIABLES, depending on
   the POISON flag.  The created statements are appended to the SEQ_P
   gimple sequence.  */

static void
asan_poison_variables (hash_set<tree> *variables, bool poison,
		       gimple_seq *seq_p)
{
  unsigned c = variables->elements ();
  if (c == 0)
    return;

  auto_vec<tree> sorted_variables (c);

  for (hash_set<tree>::iterator it = variables->begin ();
       it != variables->end (); ++it)
    sorted_variables.safe_push (*it);

  sorted_variables.qsort (sort_by_decl_uid);

  unsigned i;
  tree var;
  FOR_EACH_VEC_ELT (sorted_variables, i, var)
    {
      asan_poison_variable (var, poison, seq_p);

      /* Add the use_after_scope_memory attribute to the variable in order
	 to prevent it from being rewritten into SSA form.  */
      if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
			     DECL_ATTRIBUTES (var)))
	DECL_ATTRIBUTES (var)
	  = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
		       integer_one_node,
		       DECL_ATTRIBUTES (var));
    }
}

/* Gimplify a BIND_EXPR.  Just voidify and recurse.  */

static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_keep_stack = gimplify_ctxp->keep_stack;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gbind *bind_stmt;
  gimple_seq body, cleanup;
  gcall *stack_save;
  location_t start_locus = 0, end_locus = 0;
  tree ret_clauses = NULL;

  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t))
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;

	  /* Mark variable as local.  */
	  if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t))
	    {
	      if (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL)
		{
		  int flag = GOVD_LOCAL;
		  if (ctx->region_type == ORT_SIMD
		      && TREE_ADDRESSABLE (t)
		      && !TREE_STATIC (t))
		    {
		      if (TREE_CODE (DECL_SIZE_UNIT (t)) != INTEGER_CST)
			ctx->add_safelen1 = true;
		      else
			flag = GOVD_PRIVATE;
		    }
		  omp_add_variable (ctx, t, flag | GOVD_SEEN);
		}
	      /* Static locals inside of target construct or offloaded
		 routines need to be "omp declare target".  */
	      if (TREE_STATIC (t))
		for (; ctx; ctx = ctx->outer_context)
		  if ((ctx->region_type & ORT_TARGET) != 0)
		    {
		      if (!lookup_attribute ("omp declare target",
					     DECL_ATTRIBUTES (t)))
			{
			  tree id = get_identifier ("omp declare target");
			  DECL_ATTRIBUTES (t)
			    = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (t));
			  varpool_node *node = varpool_node::get (t);
			  if (node)
			    {
			      node->offloadable = 1;
			      if (ENABLE_OFFLOADING && !DECL_EXTERNAL (t))
				{
				  g->have_offload = true;
				  if (!in_lto_p)
				    vec_safe_push (offload_vars, t);
				}
			    }
			}
		      break;
		    }
	    }

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}
    }

  bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
				 BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (bind_stmt);

  gimplify_ctxp->keep_stack = false;
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (bind_stmt, body);

  /* Source location wise, the cleanup code (stack_restore and clobbers)
     belongs to the end of the block, so propagate what we have.  The
     stack_save operation belongs to the beginning of block, which we can
     infer from the bind_expr directly if the block has no explicit
     assignment.  */
  if (BIND_EXPR_BLOCK (bind_expr))
    {
      end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
      start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
    }
  if (start_locus == 0)
    start_locus = EXPR_LOCATION (bind_expr);

  cleanup = NULL;
  stack_save = NULL;

  /* If the code both contains VLAs and calls alloca, then we cannot reclaim
     the stack space allocated to the VLAs.  */
  if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
    {
      gcall *stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimple_set_location (stack_save, start_locus);
      gimple_set_location (stack_restore, end_locus);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t)
	  && !is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl)
	{
	  if (!DECL_HARD_REGISTER (t)
	      && !TREE_THIS_VOLATILE (t)
	      && !DECL_HAS_VALUE_EXPR_P (t)
	      /* Only care for variables that have to be in memory.  Others
		 will be rewritten into SSA names, hence moved to the
		 top-level.  */
	      && !is_gimple_reg (t)
	      && flag_stack_reuse != SR_NONE)
	    {
	      tree clobber = build_clobber (TREE_TYPE (t), CLOBBER_EOL);
	      gimple *clobber_stmt;
	      clobber_stmt = gimple_build_assign (t, clobber);
	      gimple_set_location (clobber_stmt, end_locus);
	      gimplify_seq_add_stmt (&cleanup, clobber_stmt);
	    }

	  if (flag_openacc && oacc_declare_returns != NULL)
	    {
	      tree key = t;
	      if (DECL_HAS_VALUE_EXPR_P (key))
		{
		  key = DECL_VALUE_EXPR (key);
		  if (TREE_CODE (key) == INDIRECT_REF)
		    key = TREE_OPERAND (key, 0);
		}
	      tree *c = oacc_declare_returns->get (key);
	      if (c != NULL)
		{
		  if (ret_clauses)
		    OMP_CLAUSE_CHAIN (*c) = ret_clauses;

		  ret_clauses = unshare_expr (*c);

		  oacc_declare_returns->remove (key);

		  if (oacc_declare_returns->is_empty ())
		    {
		      delete oacc_declare_returns;
		      oacc_declare_returns = NULL;
		    }
		}
	    }
	}

      if (asan_poisoned_variables != NULL
	  && asan_poisoned_variables->contains (t))
	{
	  asan_poisoned_variables->remove (t);
	  asan_poison_variable (t, true, &cleanup);
	}

      if (gimplify_ctxp->live_switch_vars != NULL
	  && gimplify_ctxp->live_switch_vars->contains (t))
	gimplify_ctxp->live_switch_vars->remove (t);
    }

  if (ret_clauses)
    {
      gomp_target *stmt;
      gimple_stmt_iterator si = gsi_start (cleanup);

      stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
				      ret_clauses);
      gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
    }

  if (cleanup)
    {
      gtry *gs;
      gimple_seq new_body;

      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
			     GIMPLE_TRY_FINALLY);

      if (stack_save)
	gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (bind_stmt, new_body);
    }

  /* keep_stack propagates all the way up to the outermost BIND_EXPR.  */
  if (!gimplify_ctxp->keep_stack)
    gimplify_ctxp->keep_stack = old_keep_stack;
  gimplify_ctxp->save_stack = old_save_stack;

  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, bind_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}

/* Maybe add an early-return predict statement to the PRE_P sequence.  */

static void
maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
{
  /* If we are inside a conditional context, add a PREDICT statement
     marking this early-return path as not taken.  */
  if (gimple_conditional_context ())
    {
      gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
					      NOT_TAKEN);
      gimplify_seq_add_stmt (pre_p, predict);
    }
}

/* Gimplify a RETURN_EXPR.  If the expression to be returned is not a
   GIMPLE value, it is assigned to a new temporary and the statement is
   re-written to return the temporary.

   PRE_P points to the sequence where side effects that must happen before
   STMT should be stored.  */

static enum gimplify_status
gimplify_return_expr (tree stmt, gimple_seq *pre_p)
{
  greturn *ret;
  tree ret_expr = TREE_OPERAND (stmt, 0);
  tree result_decl, result;

  if (ret_expr == error_mark_node)
    return GS_ERROR;

  if (!ret_expr
      || TREE_CODE (ret_expr) == RESULT_DECL)
    {
      maybe_add_early_return_predict_stmt (pre_p);
      greturn *ret = gimple_build_return (ret_expr);
      copy_warning (ret, stmt);
      gimplify_seq_add_stmt (pre_p, ret);
      return GS_ALL_DONE;
    }

  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
    result_decl = NULL_TREE;
  else if (TREE_CODE (ret_expr) == COMPOUND_EXPR)
    {
      /* Used in C++ for handling EH cleanup of the return value if a local
	 cleanup throws.  Assume the front-end knows what it's doing.  */
      result_decl = DECL_RESULT (current_function_decl);
      /* But crash if we end up trying to modify ret_expr below.  */
      ret_expr = NULL_TREE;
    }
  else
    {
      result_decl = TREE_OPERAND (ret_expr, 0);

      /* See through a return by reference.  */
      if (TREE_CODE (result_decl) == INDIRECT_REF)
	result_decl = TREE_OPERAND (result_decl, 0);

      gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
		   || TREE_CODE (ret_expr) == INIT_EXPR)
		  && TREE_CODE (result_decl) == RESULT_DECL);
    }

  /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
     Recall that aggregate_value_p is FALSE for any aggregate type that is
     returned in registers.  If we're returning values in registers, then
     we don't want to extend the lifetime of the RESULT_DECL, particularly
     across another call.  In addition, for those aggregates for which
     hard_function_value generates a PARALLEL, we'll die during normal
     expansion of structure assignments; there's special code in expand_return
     to handle this case that does not exist in expand_expr.  */
  if (!result_decl)
    result = NULL_TREE;
  else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
    {
      if (!poly_int_tree_p (DECL_SIZE (result_decl)))
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
	    gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
	  /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
	     should be effectively allocated by the caller, i.e. all calls to
	     this function must be subject to the Return Slot Optimization.  */
	  gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
	  gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
	}
      result = result_decl;
    }
  else if (gimplify_ctxp->return_temp)
    result = gimplify_ctxp->return_temp;
  else
    {
      result = create_tmp_reg (TREE_TYPE (result_decl));

      /* ??? With complex control flow (usually involving abnormal edges),
	 we can wind up warning about an uninitialized value for this.  Due
	 to how this variable is constructed and initialized, this is never
	 true.  Give up and never warn.  */
      suppress_warning (result, OPT_Wuninitialized);

      gimplify_ctxp->return_temp = result;
    }

  /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
     Then gimplify the whole thing.  */
  if (result != result_decl)
    TREE_OPERAND (ret_expr, 0) = result;

  gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);

  maybe_add_early_return_predict_stmt (pre_p);
  ret = gimple_build_return (result);
  copy_warning (ret, stmt);
  gimplify_seq_add_stmt (pre_p, ret);

  return GS_ALL_DONE;
}

/* Gimplify a variable-length array DECL.  */

static void
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
{
  /* This is a variable-sized decl.  Simplify its size and mark it
     for deferred expansion.  */
  tree t, addr, ptr_type;

  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);

  /* Don't mess with a DECL_VALUE_EXPR set by the front-end.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    return;

  /* All occurrences of this decl in final gimplified code will be
     replaced by indirection.  Setting DECL_VALUE_EXPR does two
     things: First, it lets the rest of the gimplifier know what
     replacement to use.  Second, it lets the debug info know
     where to find the value.  */
  ptr_type = build_pointer_type (TREE_TYPE (decl));
  addr = create_tmp_var (ptr_type, get_name (decl));
  DECL_IGNORED_P (addr) = 0;
  t = build_fold_indirect_ref (addr);
  TREE_THIS_NOTRAP (t) = 1;
  SET_DECL_VALUE_EXPR (decl, t);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;

  t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl),
			      max_int_size_in_bytes (TREE_TYPE (decl)));
  /* The call has been built for a variable-sized object.  */
  CALL_ALLOCA_FOR_VAR_P (t) = 1;
  t = fold_convert (ptr_type, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);

  gimplify_and_add (t, seq_p);

  /* Record the dynamic allocation associated with DECL if requested.  */
  if (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC)
    record_dynamic_alloc (decl);
}
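
/* Illustration (for a hypothetical C declaration "char a[n]"): after
   gimplify_vla_decl, the decl has DECL_VALUE_EXPR "*a.N" for a pointer
   temporary a.N, and SEQ_P contains roughly

     a.N = __builtin_alloca_with_align (SAVE_EXPR <n>, align);

   (or the _and_max variant when a maximum size is known), so every later
   use of the array gimplifies into an indirection through that pointer.  */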

/* A helper function to be called via walk_tree.  Mark all labels under *TP
   as being forced.  To be called for DECL_INITIAL of static variables.  */

static tree
force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  if (TYPE_P (*tp))
    *walk_subtrees = 0;
  if (TREE_CODE (*tp) == LABEL_DECL)
    {
      FORCED_LABEL (*tp) = 1;
      cfun->has_forced_label_in_static = 1;
    }

  return NULL_TREE;
}

/* Generate an initialization to automatic variable DECL based on INIT_TYPE.
   Build a call to internal const function DEFERRED_INIT:
   1st argument: SIZE of the DECL;
   2nd argument: INIT_TYPE;
   3rd argument: NAME of the DECL;

   as LHS = DEFERRED_INIT (SIZE of the DECL, INIT_TYPE, NAME of the DECL).  */

static void
gimple_add_init_for_auto_var (tree decl,
			      enum auto_init_type init_type,
			      gimple_seq *seq_p)
{
  gcc_assert (auto_var_p (decl));
  gcc_assert (init_type > AUTO_INIT_UNINITIALIZED);
  location_t loc = EXPR_LOCATION (decl);
  tree decl_size = TYPE_SIZE_UNIT (TREE_TYPE (decl));

  tree init_type_node
    = build_int_cst (integer_type_node, (int) init_type);

  tree decl_name = NULL_TREE;
  if (DECL_NAME (decl))
    decl_name = build_string_literal (IDENTIFIER_LENGTH (DECL_NAME (decl)) + 1,
				      IDENTIFIER_POINTER (DECL_NAME (decl)));
  else
    {
      char *decl_name_anonymous = xasprintf ("D.%u", DECL_UID (decl));
      decl_name = build_string_literal (strlen (decl_name_anonymous) + 1,
					decl_name_anonymous);
      free (decl_name_anonymous);
    }

  tree call = build_call_expr_internal_loc (loc, IFN_DEFERRED_INIT,
					    TREE_TYPE (decl), 3,
					    decl_size, init_type_node,
					    decl_name);

  gimplify_assign (decl, call, seq_p);
}
1790
1791 /* Generate padding initialization for automatic variable DECL.
1792 C guarantees that brace-initialization with fewer initializers than
1793 members of the aggregate will initialize the rest of the aggregate
1794 as if it were static initialization. In turn, static initialization
1795 guarantees that padding is initialized to zero. So, we always initialize
1796 paddings to zeroes regardless of INIT_TYPE.
1797 To do the padding initialization, we insert a call to
1798 __builtin_clear_padding (&decl, 0, for_auto_init = true).
1799 Note, we add an additional dummy argument for __builtin_clear_padding,
1800 'for_auto_init' to distinguish whether this call is for automatic
1801 variable initialization or not.
1802 */
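/* For instance, for a pattern-initialized
   struct { char c; long l; } s;
   we emit, after the .DEFERRED_INIT store, roughly
   __builtin_clear_padding (&s, 1);
   where the trailing 1 is the internal for_auto_init flag built below
   (a sketch; the user-visible builtin takes just the pointer). */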
1803 static void
1804 gimple_add_padding_init_for_auto_var (tree decl, bool is_vla,
1805 gimple_seq *seq_p)
1806 {
1807 tree addr_of_decl = NULL_TREE;
1808 tree fn = builtin_decl_explicit (BUILT_IN_CLEAR_PADDING);
1809
1810 if (is_vla)
1811 {
1812 /* The temporary address variable for this vla should be
1813 created in gimplify_vla_decl. */
1814 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
1815 gcc_assert (TREE_CODE (DECL_VALUE_EXPR (decl)) == INDIRECT_REF);
1816 addr_of_decl = TREE_OPERAND (DECL_VALUE_EXPR (decl), 0);
1817 }
1818 else
1819 {
1820 mark_addressable (decl);
1821 addr_of_decl = build_fold_addr_expr (decl);
1822 }
1823
1824 gimple *call = gimple_build_call (fn, 2, addr_of_decl,
1825 build_one_cst (TREE_TYPE (addr_of_decl)));
1826 gimplify_seq_add_stmt (seq_p, call);
1827 }
1828
1829 /* Return true if the DECL needs to be automatically initialized by the
1830 compiler. */
1831 static bool
1832 is_var_need_auto_init (tree decl)
1833 {
1834 if (auto_var_p (decl)
1835 && (TREE_CODE (decl) != VAR_DECL
1836 || !DECL_HARD_REGISTER (decl))
1837 && (flag_auto_var_init > AUTO_INIT_UNINITIALIZED)
1838 && (!lookup_attribute ("uninitialized", DECL_ATTRIBUTES (decl)))
1839 && !OPAQUE_TYPE_P (TREE_TYPE (decl))
1840 && !is_empty_type (TREE_TYPE (decl)))
1841 return true;
1842 return false;
1843 }
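/* E.g., with -ftrivial-auto-var-init=pattern, a plain "int i;" gets a
   .DEFERRED_INIT call, while
   int i __attribute__((uninitialized));
   is skipped because of the "uninitialized" attribute check above. */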
1844
1845 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1846 and initialization explicit. */
1847
1848 static enum gimplify_status
1849 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1850 {
1851 tree stmt = *stmt_p;
1852 tree decl = DECL_EXPR_DECL (stmt);
1853
1854 *stmt_p = NULL_TREE;
1855
1856 if (TREE_TYPE (decl) == error_mark_node)
1857 return GS_ERROR;
1858
1859 if ((TREE_CODE (decl) == TYPE_DECL
1860 || VAR_P (decl))
1861 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1862 {
1863 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1864 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
1865 gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
1866 }
1867
1868 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1869 in case its size expressions contain problematic nodes like CALL_EXPR. */
1870 if (TREE_CODE (decl) == TYPE_DECL
1871 && DECL_ORIGINAL_TYPE (decl)
1872 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
1873 {
1874 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
1875 if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
1876 gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
1877 }
1878
1879 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
1880 {
1881 tree init = DECL_INITIAL (decl);
1882 bool is_vla = false;
1883 /* Check whether the decl has an FE-created VALUE_EXPR here BEFORE
1884 gimplify_vla_decl creates a VALUE_EXPR for a VLA decl.
1885 If the decl has a VALUE_EXPR that was created by the FE (usually
1886 the C++ FE), it's a proxy variable, and the FE already initialized
1887 its VALUE_EXPR, so we should not initialize it again. */
1888 bool decl_had_value_expr_p = DECL_HAS_VALUE_EXPR_P (decl);
1889
1890 poly_uint64 size;
1891 if (!poly_int_tree_p (DECL_SIZE_UNIT (decl), &size)
1892 || (!TREE_STATIC (decl)
1893 && flag_stack_check == GENERIC_STACK_CHECK
1894 && maybe_gt (size,
1895 (unsigned HOST_WIDE_INT) STACK_CHECK_MAX_VAR_SIZE)))
1896 {
1897 gimplify_vla_decl (decl, seq_p);
1898 is_vla = true;
1899 }
1900
1901 if (asan_poisoned_variables
1902 && !is_vla
1903 && TREE_ADDRESSABLE (decl)
1904 && !TREE_STATIC (decl)
1905 && !DECL_HAS_VALUE_EXPR_P (decl)
1906 && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT
1907 && dbg_cnt (asan_use_after_scope)
1908 && !gimplify_omp_ctxp
1909 /* GNAT introduces temporaries to hold return values of calls in
1910 initializers of variables defined in other units, so the
1911 declaration of the variable is discarded completely. We do not
1912 want to issue poison calls for such dropped variables. */
1913 && (DECL_SEEN_IN_BIND_EXPR_P (decl)
1914 || (DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)))
1915 {
1916 asan_poisoned_variables->add (decl);
1917 asan_poison_variable (decl, false, seq_p);
1918 if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
1919 gimplify_ctxp->live_switch_vars->add (decl);
1920 }
1921
1922 /* Some front ends do not explicitly declare all anonymous
1923 artificial variables. We compensate here by declaring the
1924 variables, though it would be better if the front ends would
1925 explicitly declare them. */
1926 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1927 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1928 gimple_add_tmp_var (decl);
1929
1930 if (init && init != error_mark_node)
1931 {
1932 if (!TREE_STATIC (decl))
1933 {
1934 DECL_INITIAL (decl) = NULL_TREE;
1935 init = build2 (INIT_EXPR, void_type_node, decl, init);
1936 gimplify_and_add (init, seq_p);
1937 ggc_free (init);
1938 /* Clear TREE_READONLY if we really have an initialization. */
1939 if (!DECL_INITIAL (decl)
1940 && !omp_privatize_by_reference (decl))
1941 TREE_READONLY (decl) = 0;
1942 }
1943 else
1944 /* We must still examine initializers for static variables
1945 as they may contain a label address. */
1946 walk_tree (&init, force_labels_r, NULL, NULL);
1947 }
1948 /* When there is no explicit initializer and the user requested it,
1949 we should insert an artificial initializer for this automatic
1950 variable. */
1951 else if (is_var_need_auto_init (decl)
1952 && !decl_had_value_expr_p)
1953 {
1954 gimple_add_init_for_auto_var (decl,
1955 flag_auto_var_init,
1956 seq_p);
1957 /* The expanding of a call to the above .DEFERRED_INIT will apply
1958 block initialization to the whole space covered by this variable.
1959 As a result, all the paddings will be initialized to zeroes
1960 for zero initialization and 0xFE byte-repeatable patterns for
1961 pattern initialization.
1962 In order to make the paddings zeroes for pattern init, we
1963 should add a call to __builtin_clear_padding to clear the
1964 paddings to zero, in line with what Clang does.
1965 We cannot insert this call if the variable is a gimple register,
1966 since __builtin_clear_padding will take the address of the
1967 variable. As a result, if a long double/_Complex long double
1968 variable is spilled onto the stack later, its padding is 0xFE. */
1969 if (flag_auto_var_init == AUTO_INIT_PATTERN
1970 && !is_gimple_reg (decl)
1971 && clear_padding_type_may_have_padding_p (TREE_TYPE (decl)))
1972 gimple_add_padding_init_for_auto_var (decl, is_vla, seq_p);
1973 }
1974 }
1975
1976 return GS_ALL_DONE;
1977 }
1978
1979 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1980 and replacing the LOOP_EXPR with goto, but if the loop contains an
1981 EXIT_EXPR, we need to append a label for it to jump to. */
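/* I.e., "LOOP_EXPR <body>" becomes roughly
   START:
   body;
   goto START;
   EXIT:
   with the EXIT label emitted only if gimplifying BODY created
   gimplify_ctxp->exit_label via an EXIT_EXPR. */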
1982
1983 static enum gimplify_status
1984 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1985 {
1986 tree saved_label = gimplify_ctxp->exit_label;
1987 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1988
1989 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1990
1991 gimplify_ctxp->exit_label = NULL_TREE;
1992
1993 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1994
1995 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1996
1997 if (gimplify_ctxp->exit_label)
1998 gimplify_seq_add_stmt (pre_p,
1999 gimple_build_label (gimplify_ctxp->exit_label));
2000
2001 gimplify_ctxp->exit_label = saved_label;
2002
2003 *expr_p = NULL;
2004 return GS_ALL_DONE;
2005 }
2006
2007 /* Gimplify a statement list onto a sequence. These may be created either
2008 by an enlightened front-end, or by shortcut_cond_expr. */
2009
2010 static enum gimplify_status
2011 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
2012 {
2013 tree temp = voidify_wrapper_expr (*expr_p, NULL);
2014
2015 tree_stmt_iterator i = tsi_start (*expr_p);
2016
2017 while (!tsi_end_p (i))
2018 {
2019 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
2020 tsi_delink (&i);
2021 }
2022
2023 if (temp)
2024 {
2025 *expr_p = temp;
2026 return GS_OK;
2027 }
2028
2029 return GS_ALL_DONE;
2030 }
2031
2032
2033 /* Emit a warning for the unreachable statement STMT if needed.
2034 Return the statement when the warning is emitted, otherwise
2035 return NULL. */
2036 static gimple *
2037 emit_warn_switch_unreachable (gimple *stmt)
2038 {
2039 if (gimple_code (stmt) == GIMPLE_GOTO
2040 && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
2041 && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
2042 /* Don't warn for compiler-generated gotos. These occur
2043 in Duff's devices, for example. */
2044 return NULL;
2045 else if ((flag_auto_var_init > AUTO_INIT_UNINITIALIZED)
2046 && ((gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
2047 || (gimple_call_builtin_p (stmt, BUILT_IN_CLEAR_PADDING)
2048 && (bool) TREE_INT_CST_LOW (gimple_call_arg (stmt, 1)))
2049 || (is_gimple_assign (stmt)
2050 && gimple_assign_single_p (stmt)
2051 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
2052 && gimple_call_internal_p (
2053 SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt)),
2054 IFN_DEFERRED_INIT))))
2055 /* Don't warn for compiler-generated initializations for
2056 -ftrivial-auto-var-init.
2057 There are 3 cases:
2058 case 1: a call to .DEFERRED_INIT;
2059 case 2: a call to __builtin_clear_padding with the 2nd argument
2060 present and non-zero;
2061 case 3: a gimple assign store right after the call to .DEFERRED_INIT
2062 that has the LHS of .DEFERRED_INIT as the RHS as follows:
2063 _1 = .DEFERRED_INIT (4, 2, &"i1"[0]);
2064 i1 = _1. */
2065 return NULL;
2066 else
2067 warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
2068 "statement will never be executed");
2069 return stmt;
2070 }
2071
2072 /* Callback for walk_gimple_seq. */
2073
2074 static tree
2075 warn_switch_unreachable_and_auto_init_r (gimple_stmt_iterator *gsi_p,
2076 bool *handled_ops_p,
2077 struct walk_stmt_info *wi)
2078 {
2079 gimple *stmt = gsi_stmt (*gsi_p);
2080 bool unreachable_issued = wi->info != NULL;
2081
2082 *handled_ops_p = true;
2083 switch (gimple_code (stmt))
2084 {
2085 case GIMPLE_TRY:
2086 /* A compiler-generated cleanup or a user-written try block.
2087 If it's empty, don't dive into it--that would result in
2088 worse location info. */
2089 if (gimple_try_eval (stmt) == NULL)
2090 {
2091 if (warn_switch_unreachable && !unreachable_issued)
2092 wi->info = emit_warn_switch_unreachable (stmt);
2093
2094 /* Stop when auto var init warning is not on. */
2095 if (!warn_trivial_auto_var_init)
2096 return integer_zero_node;
2097 }
2098 /* Fall through. */
2099 case GIMPLE_BIND:
2100 case GIMPLE_CATCH:
2101 case GIMPLE_EH_FILTER:
2102 case GIMPLE_TRANSACTION:
2103 /* Walk the sub-statements. */
2104 *handled_ops_p = false;
2105 break;
2106
2107 case GIMPLE_DEBUG:
2108 /* Ignore these. We may generate them before declarations that
2109 are never executed. If there's something to warn about,
2110 there will be non-debug stmts too, and we'll catch those. */
2111 break;
2112
2113 case GIMPLE_LABEL:
2114 /* Stop at the first label. */
2115 return integer_zero_node;
2116 case GIMPLE_CALL:
2117 if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
2118 {
2119 *handled_ops_p = false;
2120 break;
2121 }
2122 if (warn_trivial_auto_var_init
2123 && flag_auto_var_init > AUTO_INIT_UNINITIALIZED
2124 && gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
2125 {
2126 /* Get the variable name from the 3rd argument of call. */
2127 tree var_name = gimple_call_arg (stmt, 2);
2128 var_name = TREE_OPERAND (TREE_OPERAND (var_name, 0), 0);
2129 const char *var_name_str = TREE_STRING_POINTER (var_name);
2130
2131 warning_at (gimple_location (stmt), OPT_Wtrivial_auto_var_init,
2132 "%qs cannot be initialized with "
2133 "%<-ftrivial-auto-var-init%>",
2134 var_name_str);
2135 break;
2136 }
2137
2138 /* Fall through. */
2139 default:
2140 /* Check the first "real" statement (not a decl/lexical scope/...);
2141 issue a warning if needed. */
2142 if (warn_switch_unreachable && !unreachable_issued)
2143 wi->info = emit_warn_switch_unreachable (stmt);
2144 /* Stop when auto var init warning is not on. */
2145 if (!warn_trivial_auto_var_init)
2146 return integer_zero_node;
2147 break;
2148 }
2149 return NULL_TREE;
2150 }
2151
2152
2153 /* Possibly warn about unreachable statements between switch's controlling
2154 expression and the first case. Also warn when -ftrivial-auto-var-init
2155 cannot initialize the auto variable in such a situation.
2156 SEQ is the body of a switch expression. */
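/* For example, on
   switch (cond) { i = 15; case 5: break; }
   the store to I is diagnosed by -Wswitch-unreachable, and with
   -ftrivial-auto-var-init a declaration such as "int j;" in that
   position is instead diagnosed as impossible to auto-initialize. */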
2157
2158 static void
2159 maybe_warn_switch_unreachable_and_auto_init (gimple_seq seq)
2160 {
2161 if ((!warn_switch_unreachable && !warn_trivial_auto_var_init)
2162 /* This warning doesn't play well with Fortran when optimizations
2163 are on. */
2164 || lang_GNU_Fortran ()
2165 || seq == NULL)
2166 return;
2167
2168 struct walk_stmt_info wi;
2169
2170 memset (&wi, 0, sizeof (wi));
2171 walk_gimple_seq (seq, warn_switch_unreachable_and_auto_init_r, NULL, &wi);
2172 }
2173
2174
2175 /* A label entry that pairs label and a location. */
2176 struct label_entry
2177 {
2178 tree label;
2179 location_t loc;
2180 };
2181
2182 /* Find LABEL in vector of label entries VEC. */
2183
2184 static struct label_entry *
2185 find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
2186 {
2187 unsigned int i;
2188 struct label_entry *l;
2189
2190 FOR_EACH_VEC_ELT (*vec, i, l)
2191 if (l->label == label)
2192 return l;
2193 return NULL;
2194 }
2195
2196 /* Return true if LABEL, a LABEL_DECL, represents a case label
2197 in a vector of labels CASES. */
2198
2199 static bool
2200 case_label_p (const vec<tree> *cases, tree label)
2201 {
2202 unsigned int i;
2203 tree l;
2204
2205 FOR_EACH_VEC_ELT (*cases, i, l)
2206 if (CASE_LABEL (l) == label)
2207 return true;
2208 return false;
2209 }
2210
2211 /* Find the last nondebug statement in a scope STMT. */
2212
2213 static gimple *
2214 last_stmt_in_scope (gimple *stmt)
2215 {
2216 if (!stmt)
2217 return NULL;
2218
2219 switch (gimple_code (stmt))
2220 {
2221 case GIMPLE_BIND:
2222 {
2223 gbind *bind = as_a <gbind *> (stmt);
2224 stmt = gimple_seq_last_nondebug_stmt (gimple_bind_body (bind));
2225 return last_stmt_in_scope (stmt);
2226 }
2227
2228 case GIMPLE_TRY:
2229 {
2230 gtry *try_stmt = as_a <gtry *> (stmt);
2231 stmt = gimple_seq_last_nondebug_stmt (gimple_try_eval (try_stmt));
2232 gimple *last_eval = last_stmt_in_scope (stmt);
2233 if (gimple_stmt_may_fallthru (last_eval)
2234 && (last_eval == NULL
2235 || !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
2236 && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
2237 {
2238 stmt = gimple_seq_last_nondebug_stmt (gimple_try_cleanup (try_stmt));
2239 return last_stmt_in_scope (stmt);
2240 }
2241 else
2242 return last_eval;
2243 }
2244
2245 case GIMPLE_DEBUG:
2246 gcc_unreachable ();
2247
2248 default:
2249 return stmt;
2250 }
2251 }
2252
2253 /* Collect labels that may fall through into LABELS and return the statement
2254 preceding another case label, or a user-defined label. Store a location
2255 useful to give warnings at *PREVLOC (usually the location of the returned
2256 statement or of its surrounding scope). */
2257
2258 static gimple *
2259 collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
2260 auto_vec <struct label_entry> *labels,
2261 location_t *prevloc)
2262 {
2263 gimple *prev = NULL;
2264
2265 *prevloc = UNKNOWN_LOCATION;
2266 do
2267 {
2268 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND)
2269 {
2270 /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
2271 which starts on a GIMPLE_SWITCH and ends with a break label.
2272 Handle that as a single statement that can fall through. */
2273 gbind *bind = as_a <gbind *> (gsi_stmt (*gsi_p));
2274 gimple *first = gimple_seq_first_stmt (gimple_bind_body (bind));
2275 gimple *last = gimple_seq_last_stmt (gimple_bind_body (bind));
2276 if (last
2277 && gimple_code (first) == GIMPLE_SWITCH
2278 && gimple_code (last) == GIMPLE_LABEL)
2279 {
2280 tree label = gimple_label_label (as_a <glabel *> (last));
2281 if (SWITCH_BREAK_LABEL_P (label))
2282 {
2283 prev = bind;
2284 gsi_next (gsi_p);
2285 continue;
2286 }
2287 }
2288 }
2289 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
2290 || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
2291 {
2292 /* Nested scope. Only look at the last statement of
2293 the innermost scope. */
2294 location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
2295 gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
2296 if (last)
2297 {
2298 prev = last;
2299 /* It might be a label without a location. Use the
2300 location of the scope then. */
2301 if (!gimple_has_location (prev))
2302 *prevloc = bind_loc;
2303 }
2304 gsi_next (gsi_p);
2305 continue;
2306 }
2307
2308 /* Ifs are tricky. */
2309 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
2310 {
2311 gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
2312 tree false_lab = gimple_cond_false_label (cond_stmt);
2313 location_t if_loc = gimple_location (cond_stmt);
2314
2315 /* If we have e.g.
2316 if (i > 1) goto <D.2259>; else goto D;
2317 we can't do much with the else-branch. */
2318 if (!DECL_ARTIFICIAL (false_lab))
2319 break;
2320
2321 /* Go on until the false label, then one step back. */
2322 for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
2323 {
2324 gimple *stmt = gsi_stmt (*gsi_p);
2325 if (gimple_code (stmt) == GIMPLE_LABEL
2326 && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
2327 break;
2328 }
2329
2330 /* Not found? Oops. */
2331 if (gsi_end_p (*gsi_p))
2332 break;
2333
2334 /* A dead label can't fall through. */
2335 if (!UNUSED_LABEL_P (false_lab))
2336 {
2337 struct label_entry l = { false_lab, if_loc };
2338 labels->safe_push (l);
2339 }
2340
2341 /* Go to the last statement of the then branch. */
2342 gsi_prev (gsi_p);
2343
2344 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
2345 <D.1759>:
2346 <stmt>;
2347 goto <D.1761>;
2348 <D.1760>:
2349 */
2350 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
2351 && !gimple_has_location (gsi_stmt (*gsi_p)))
2352 {
2353 /* Look at the statement before, it might be
2354 attribute fallthrough, in which case don't warn. */
2355 gsi_prev (gsi_p);
2356 bool fallthru_before_dest
2357 = gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
2358 gsi_next (gsi_p);
2359 tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
2360 if (!fallthru_before_dest)
2361 {
2362 struct label_entry l = { goto_dest, if_loc };
2363 labels->safe_push (l);
2364 }
2365 }
2366 /* This case is about
2367 if (1 != 0) goto <D.2022>; else goto <D.2023>;
2368 <D.2022>:
2369 n = n + 1; // #1
2370 <D.2023>: // #2
2371 <D.1988>: // #3
2372 where #2 is UNUSED_LABEL_P and we want to warn about #1 falling
2373 through to #3. So set PREV to #1. */
2374 else if (UNUSED_LABEL_P (false_lab))
2375 prev = gsi_stmt (*gsi_p);
2376
2377 /* And move back. */
2378 gsi_next (gsi_p);
2379 }
2380
2381 /* Remember the last statement. Skip labels that are of no interest
2382 to us. */
2383 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2384 {
2385 tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
2386 if (find_label_entry (labels, label))
2387 prev = gsi_stmt (*gsi_p);
2388 }
2389 else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
2390 ;
2391 else if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_PREDICT)
2392 ;
2393 else if (!is_gimple_debug (gsi_stmt (*gsi_p)))
2394 prev = gsi_stmt (*gsi_p);
2395 gsi_next (gsi_p);
2396 }
2397 while (!gsi_end_p (*gsi_p)
2398 /* Stop if we find a case or a user-defined label. */
2399 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
2400 || !gimple_has_location (gsi_stmt (*gsi_p))));
2401
2402 if (prev && gimple_has_location (prev))
2403 *prevloc = gimple_location (prev);
2404 return prev;
2405 }
2406
2407 /* Return true if the switch fallthrough warning should occur. LABEL is
2408 the label statement that we're falling through to. */
2409
2410 static bool
2411 should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
2412 {
2413 gimple_stmt_iterator gsi = *gsi_p;
2414
2415 /* Don't warn if the label is marked with a "falls through" comment. */
2416 if (FALLTHROUGH_LABEL_P (label))
2417 return false;
2418
2419 /* Don't warn for non-case labels followed by a statement:
2420 case 0:
2421 foo ();
2422 label:
2423 bar ();
2424 as these are likely intentional. */
2425 if (!case_label_p (&gimplify_ctxp->case_labels, label))
2426 {
2427 tree l;
2428 while (!gsi_end_p (gsi)
2429 && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2430 && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
2431 && !case_label_p (&gimplify_ctxp->case_labels, l))
2432 gsi_next_nondebug (&gsi);
2433 if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
2434 return false;
2435 }
2436
2437 /* Don't warn for terminated branches, i.e. when the subsequent case
2438 label immediately breaks. */
2439 gsi = *gsi_p;
2440
2441 /* Skip all immediately following labels. */
2442 while (!gsi_end_p (gsi)
2443 && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2444 || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT))
2445 gsi_next_nondebug (&gsi);
2446
2447 /* { ... something; default:; } */
2448 if (gsi_end_p (gsi)
2449 /* { ... something; default: break; } or
2450 { ... something; default: goto L; } */
2451 || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
2452 /* { ... something; default: return; } */
2453 || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
2454 return false;
2455
2456 return true;
2457 }
2458
2459 /* Callback for walk_gimple_seq. */
2460
2461 static tree
2462 warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2463 struct walk_stmt_info *)
2464 {
2465 gimple *stmt = gsi_stmt (*gsi_p);
2466
2467 *handled_ops_p = true;
2468 switch (gimple_code (stmt))
2469 {
2470 case GIMPLE_TRY:
2471 case GIMPLE_BIND:
2472 case GIMPLE_CATCH:
2473 case GIMPLE_EH_FILTER:
2474 case GIMPLE_TRANSACTION:
2475 /* Walk the sub-statements. */
2476 *handled_ops_p = false;
2477 break;
2478
2479 /* Find a sequence of form:
2480
2481 GIMPLE_LABEL
2482 [...]
2483 <may fallthru stmt>
2484 GIMPLE_LABEL
2485
2486 and possibly warn. */
2487 case GIMPLE_LABEL:
2488 {
2489 /* Found a label. Skip all immediately following labels. */
2490 while (!gsi_end_p (*gsi_p)
2491 && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2492 gsi_next_nondebug (gsi_p);
2493
2494 /* There might be no more statements. */
2495 if (gsi_end_p (*gsi_p))
2496 return integer_zero_node;
2497
2498 /* Vector of labels that fall through. */
2499 auto_vec <struct label_entry> labels;
2500 location_t prevloc;
2501 gimple *prev = collect_fallthrough_labels (gsi_p, &labels, &prevloc);
2502
2503 /* There might be no more statements. */
2504 if (gsi_end_p (*gsi_p))
2505 return integer_zero_node;
2506
2507 gimple *next = gsi_stmt (*gsi_p);
2508 tree label;
2509 /* If what follows is a label, then we may have a fallthrough. */
2510 if (gimple_code (next) == GIMPLE_LABEL
2511 && gimple_has_location (next)
2512 && (label = gimple_label_label (as_a <glabel *> (next)))
2513 && prev != NULL)
2514 {
2515 struct label_entry *l;
2516 bool warned_p = false;
2517 auto_diagnostic_group d;
2518 if (!should_warn_for_implicit_fallthrough (gsi_p, label))
2519 /* Quiet. */;
2520 else if (gimple_code (prev) == GIMPLE_LABEL
2521 && (label = gimple_label_label (as_a <glabel *> (prev)))
2522 && (l = find_label_entry (&labels, label)))
2523 warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
2524 "this statement may fall through");
2525 else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
2526 /* Try to be clever and don't warn when the statement
2527 can't actually fall through. */
2528 && gimple_stmt_may_fallthru (prev)
2529 && prevloc != UNKNOWN_LOCATION)
2530 warned_p = warning_at (prevloc,
2531 OPT_Wimplicit_fallthrough_,
2532 "this statement may fall through");
2533 if (warned_p)
2534 inform (gimple_location (next), "here");
2535
2536 /* Mark this label as processed so as to prevent multiple
2537 warnings in nested switches. */
2538 FALLTHROUGH_LABEL_P (label) = true;
2539
2540 /* So that next warn_implicit_fallthrough_r will start looking for
2541 a new sequence starting with this label. */
2542 gsi_prev (gsi_p);
2543 }
2544 }
2545 break;
2546 default:
2547 break;
2548 }
2549 return NULL_TREE;
2550 }
2551
2552 /* Warn when a switch case falls through. */
2553
2554 static void
2555 maybe_warn_implicit_fallthrough (gimple_seq seq)
2556 {
2557 if (!warn_implicit_fallthrough)
2558 return;
2559
2560 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2561 if (!(lang_GNU_C ()
2562 || lang_GNU_CXX ()
2563 || lang_GNU_OBJC ()))
2564 return;
2565
2566 struct walk_stmt_info wi;
2567 memset (&wi, 0, sizeof (wi));
2568 walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
2569 }
2570
2571 /* Callback for walk_gimple_seq. */
2572
2573 static tree
2574 expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2575 struct walk_stmt_info *wi)
2576 {
2577 gimple *stmt = gsi_stmt (*gsi_p);
2578
2579 *handled_ops_p = true;
2580 switch (gimple_code (stmt))
2581 {
2582 case GIMPLE_TRY:
2583 case GIMPLE_BIND:
2584 case GIMPLE_CATCH:
2585 case GIMPLE_EH_FILTER:
2586 case GIMPLE_TRANSACTION:
2587 /* Walk the sub-statements. */
2588 *handled_ops_p = false;
2589 break;
2590 case GIMPLE_CALL:
2591 if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
2592 {
2593 gsi_remove (gsi_p, true);
2594 if (gsi_end_p (*gsi_p))
2595 {
2596 *static_cast<location_t *>(wi->info) = gimple_location (stmt);
2597 return integer_zero_node;
2598 }
2599
2600 bool found = false;
2601 location_t loc = gimple_location (stmt);
2602
2603 gimple_stmt_iterator gsi2 = *gsi_p;
2604 stmt = gsi_stmt (gsi2);
2605 if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
2606 {
2607 /* Go on until the artificial label. */
2608 tree goto_dest = gimple_goto_dest (stmt);
2609 for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
2610 {
2611 if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
2612 && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
2613 == goto_dest)
2614 break;
2615 }
2616
2617 /* Not found? Stop. */
2618 if (gsi_end_p (gsi2))
2619 break;
2620
2621 /* Look one past it. */
2622 gsi_next (&gsi2);
2623 }
2624
2625 /* We're looking for a case label or default label here. */
2626 while (!gsi_end_p (gsi2))
2627 {
2628 stmt = gsi_stmt (gsi2);
2629 if (gimple_code (stmt) == GIMPLE_LABEL)
2630 {
2631 tree label = gimple_label_label (as_a <glabel *> (stmt));
2632 if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
2633 {
2634 found = true;
2635 break;
2636 }
2637 }
2638 else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
2639 ;
2640 else if (!is_gimple_debug (stmt))
2641 /* Anything else is not expected. */
2642 break;
2643 gsi_next (&gsi2);
2644 }
2645 if (!found)
2646 pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
2647 "a case label or default label");
2648 }
2649 break;
2650 default:
2651 break;
2652 }
2653 return NULL_TREE;
2654 }
2655
2656 /* Expand all FALLTHROUGH () calls in SEQ. */
2657
2658 static void
2659 expand_FALLTHROUGH (gimple_seq *seq_p)
2660 {
2661 struct walk_stmt_info wi;
2662 location_t loc;
2663 memset (&wi, 0, sizeof (wi));
2664 wi.info = (void *) &loc;
2665 walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
2666 if (wi.callback_result == integer_zero_node)
2667 /* We've found [[fallthrough]]; at the end of a switch, which the C++
2668 standard says is ill-formed; see [dcl.attr.fallthrough]. */
2669 pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
2670 "a case label or default label");
2671 }
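/* Thus
   case 1: x++; __attribute__((fallthrough)); case 2: break;
   is accepted, while placing the attribute where no case or default
   label follows, e.g.
   case 1: __attribute__((fallthrough)); x++; break;
   triggers the pedwarn in expand_FALLTHROUGH_r above. */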
2672
2673
2674 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
2675 branch to. */
2676
2677 static enum gimplify_status
2678 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
2679 {
2680 tree switch_expr = *expr_p;
2681 gimple_seq switch_body_seq = NULL;
2682 enum gimplify_status ret;
2683 tree index_type = TREE_TYPE (switch_expr);
2684 if (index_type == NULL_TREE)
2685 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
2686
2687 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
2688 fb_rvalue);
2689 if (ret == GS_ERROR || ret == GS_UNHANDLED)
2690 return ret;
2691
2692 if (SWITCH_BODY (switch_expr))
2693 {
2694 vec<tree> labels;
2695 vec<tree> saved_labels;
2696 hash_set<tree> *saved_live_switch_vars = NULL;
2697 tree default_case = NULL_TREE;
2698 gswitch *switch_stmt;
2699
2700 /* Save old labels, get new ones from body, then restore the old
2701 labels. Save all the things from the switch body to append after. */
2702 saved_labels = gimplify_ctxp->case_labels;
2703 gimplify_ctxp->case_labels.create (8);
2704
2705 /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR. */
2706 saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
2707 tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
2708 if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
2709 gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
2710 else
2711 gimplify_ctxp->live_switch_vars = NULL;
2712
2713 bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
2714 gimplify_ctxp->in_switch_expr = true;
2715
2716 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
2717
2718 gimplify_ctxp->in_switch_expr = old_in_switch_expr;
2719 maybe_warn_switch_unreachable_and_auto_init (switch_body_seq);
2720 maybe_warn_implicit_fallthrough (switch_body_seq);
2721 /* Only do this for the outermost GIMPLE_SWITCH. */
2722 if (!gimplify_ctxp->in_switch_expr)
2723 expand_FALLTHROUGH (&switch_body_seq);
2724
2725 labels = gimplify_ctxp->case_labels;
2726 gimplify_ctxp->case_labels = saved_labels;
2727
2728 if (gimplify_ctxp->live_switch_vars)
2729 {
2730 gcc_assert (gimplify_ctxp->live_switch_vars->is_empty ());
2731 delete gimplify_ctxp->live_switch_vars;
2732 }
2733 gimplify_ctxp->live_switch_vars = saved_live_switch_vars;
2734
2735 preprocess_case_label_vec_for_gimple (labels, index_type,
2736 &default_case);
2737
2738 bool add_bind = false;
2739 if (!default_case)
2740 {
2741 glabel *new_default;
2742
2743 default_case
2744 = build_case_label (NULL_TREE, NULL_TREE,
2745 create_artificial_label (UNKNOWN_LOCATION));
2746 if (old_in_switch_expr)
2747 {
2748 SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case)) = 1;
2749 add_bind = true;
2750 }
2751 new_default = gimple_build_label (CASE_LABEL (default_case));
2752 gimplify_seq_add_stmt (&switch_body_seq, new_default);
2753 }
2754 else if (old_in_switch_expr)
2755 {
2756 gimple *last = gimple_seq_last_stmt (switch_body_seq);
2757 if (last && gimple_code (last) == GIMPLE_LABEL)
2758 {
2759 tree label = gimple_label_label (as_a <glabel *> (last));
2760 if (SWITCH_BREAK_LABEL_P (label))
2761 add_bind = true;
2762 }
2763 }
2764
2765 switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
2766 default_case, labels);
2767 gimple_set_location (switch_stmt, EXPR_LOCATION (switch_expr));
2768 /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
2769 ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL,
2770 wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
2771 so that we can easily find the start and end of the switch
2772 statement. */
2773 if (add_bind)
2774 {
2775 gimple_seq bind_body = NULL;
2776 gimplify_seq_add_stmt (&bind_body, switch_stmt);
2777 gimple_seq_add_seq (&bind_body, switch_body_seq);
2778 gbind *bind = gimple_build_bind (NULL_TREE, bind_body, NULL_TREE);
2779 gimple_set_location (bind, EXPR_LOCATION (switch_expr));
2780 gimplify_seq_add_stmt (pre_p, bind);
2781 }
2782 else
2783 {
2784 gimplify_seq_add_stmt (pre_p, switch_stmt);
2785 gimplify_seq_add_seq (pre_p, switch_body_seq);
2786 }
2787 labels.release ();
2788 }
2789 else
2790 gcc_unreachable ();
2791
2792 return GS_ALL_DONE;
2793 }
2794
2795 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2796
2797 static enum gimplify_status
2798 gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
2799 {
2800 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
2801 == current_function_decl);
2802
2803 tree label = LABEL_EXPR_LABEL (*expr_p);
2804 glabel *label_stmt = gimple_build_label (label);
2805 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2806 gimplify_seq_add_stmt (pre_p, label_stmt);
2807
2808 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2809 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2810 NOT_TAKEN));
2811 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2812 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2813 TAKEN));
2814
2815 return GS_ALL_DONE;
2816 }
2817
2818 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
2819
2820 static enum gimplify_status
2821 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
2822 {
2823 struct gimplify_ctx *ctxp;
2824 glabel *label_stmt;
2825
2826 /* Invalid programs can play Duff's Device type games with, for example,
2827 #pragma omp parallel. At least in the C front end, we don't
2828 detect such invalid branches until after gimplification, in the
2829 diagnose_omp_blocks pass. */
2830 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
2831 if (ctxp->case_labels.exists ())
2832 break;
2833
2834 tree label = CASE_LABEL (*expr_p);
2835 label_stmt = gimple_build_label (label);
2836 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2837 ctxp->case_labels.safe_push (*expr_p);
2838 gimplify_seq_add_stmt (pre_p, label_stmt);
2839
2840 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2841 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2842 NOT_TAKEN));
2843 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2844 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2845 TAKEN));
2846
2847 return GS_ALL_DONE;
2848 }
2849
2850 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2851 if necessary. */
2852
2853 tree
2854 build_and_jump (tree *label_p)
2855 {
2856 if (label_p == NULL)
2857 /* If there's nowhere to jump, just fall through. */
2858 return NULL_TREE;
2859
2860 if (*label_p == NULL_TREE)
2861 {
2862 tree label = create_artificial_label (UNKNOWN_LOCATION);
2863 *label_p = label;
2864 }
2865
2866 return build1 (GOTO_EXPR, void_type_node, *label_p);
2867 }
2868
2869 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2870 This also involves building a label to jump to and communicating it to
2871 gimplify_loop_expr through gimplify_ctxp->exit_label. */
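/* I.e., "EXIT_EXPR <cond>" becomes "if (cond) goto exit_label;", with
   exit_label created on first use by build_and_jump. */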
2872
2873 static enum gimplify_status
2874 gimplify_exit_expr (tree *expr_p)
2875 {
2876 tree cond = TREE_OPERAND (*expr_p, 0);
2877 tree expr;
2878
2879 expr = build_and_jump (&gimplify_ctxp->exit_label);
2880 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
2881 *expr_p = expr;
2882
2883 return GS_OK;
2884 }
2885
2886 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
2887 different from its canonical type, wrap the whole thing inside a
2888 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
2889 type.
2890
2891 The canonical type of a COMPONENT_REF is the type of the field being
2892 referenced--unless the field is a bit-field which can be read directly
2893 in a smaller mode, in which case the canonical type is the
2894 sign-appropriate type corresponding to that mode. */
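/* For example, given
   struct S { int b : 3; } s;
   an rvalue use of s.b may get a canonical type narrower than int
   (the sign-appropriate type for the smaller mode the bit-field can
   be read in), exposing a NOP_EXPR back to int for later folding.
   (Illustrative; the exact type comes from get_unwidened below.) */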
2895
2896 static void
2897 canonicalize_component_ref (tree *expr_p)
2898 {
2899 tree expr = *expr_p;
2900 tree type;
2901
2902 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
2903
2904 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
2905 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
2906 else
2907 type = TREE_TYPE (TREE_OPERAND (expr, 1));
2908
2909 /* One could argue that all the stuff below is not necessary for
2910 the non-bitfield case and declare it a FE error if type
2911 adjustment would be needed. */
2912 if (TREE_TYPE (expr) != type)
2913 {
2914 #ifdef ENABLE_TYPES_CHECKING
2915 tree old_type = TREE_TYPE (expr);
2916 #endif
2917 int type_quals;
2918
2919 /* We need to preserve qualifiers and propagate them from
2920 operand 0. */
2921 type_quals = TYPE_QUALS (type)
2922 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
2923 if (TYPE_QUALS (type) != type_quals)
2924 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
2925
2926 /* Set the type of the COMPONENT_REF to the underlying type. */
2927 TREE_TYPE (expr) = type;
2928
2929 #ifdef ENABLE_TYPES_CHECKING
2930 /* It is now a FE error, if the conversion from the canonical
2931 type to the original expression type is not useless. */
2932 gcc_assert (useless_type_conversion_p (old_type, type));
2933 #endif
2934 }
2935 }
2936
2937 /* If a NOP conversion is changing a pointer to array of foo to a pointer
2938 to foo, embed that change in the ADDR_EXPR by converting
2939 T array[U];
2940 (T *)&array
2941 ==>
2942 &array[L]
2943 where L is the lower bound. For simplicity, only do this for constant
2944 lower bound.
2945 The constraint is that the type of &array[L] is trivially convertible
2946 to T *. */
2947
2948 static void
2949 canonicalize_addr_expr (tree *expr_p)
2950 {
2951 tree expr = *expr_p;
2952 tree addr_expr = TREE_OPERAND (expr, 0);
2953 tree datype, ddatype, pddatype;
2954
2955 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
2956 if (!POINTER_TYPE_P (TREE_TYPE (expr))
2957 || TREE_CODE (addr_expr) != ADDR_EXPR)
2958 return;
2959
2960 /* The addr_expr type should be a pointer to an array. */
2961 datype = TREE_TYPE (TREE_TYPE (addr_expr));
2962 if (TREE_CODE (datype) != ARRAY_TYPE)
2963 return;
2964
2965 /* The pointer to element type shall be trivially convertible to
2966 the expression pointer type. */
2967 ddatype = TREE_TYPE (datype);
2968 pddatype = build_pointer_type (ddatype);
2969 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
2970 pddatype))
2971 return;
2972
2973 /* The lower bound and element sizes must be constant. */
2974 if (!TYPE_SIZE_UNIT (ddatype)
2975 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
2976 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
2977 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
2978 return;
2979
2980 /* All checks succeeded. Build a new node to merge the cast. */
2981 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
2982 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
2983 NULL_TREE, NULL_TREE);
2984 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
2985
2986 /* We can have stripped a required restrict qualifier above. */
2987 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
2988 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
2989 }
2990
2991 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
2992 underneath as appropriate. */
2993
2994 static enum gimplify_status
2995 gimplify_conversion (tree *expr_p)
2996 {
2997 location_t loc = EXPR_LOCATION (*expr_p);
2998 gcc_assert (CONVERT_EXPR_P (*expr_p));
2999
3000 /* Then strip away all but the outermost conversion. */
3001 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
3002
3003 /* And remove the outermost conversion if it's useless. */
3004 if (tree_ssa_useless_type_conversion (*expr_p))
3005 *expr_p = TREE_OPERAND (*expr_p, 0);
3006
3007 /* If we still have a conversion at the toplevel,
3008 then canonicalize some constructs. */
3009 if (CONVERT_EXPR_P (*expr_p))
3010 {
3011 tree sub = TREE_OPERAND (*expr_p, 0);
3012
3013 /* If a NOP conversion is changing the type of a COMPONENT_REF
3014 expression, then canonicalize its type now in order to expose more
3015 redundant conversions. */
3016 if (TREE_CODE (sub) == COMPONENT_REF)
3017 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
3018
3019 /* If a NOP conversion is changing a pointer to array of foo
3020 to a pointer to foo, embed that change in the ADDR_EXPR. */
3021 else if (TREE_CODE (sub) == ADDR_EXPR)
3022 canonicalize_addr_expr (expr_p);
3023 }
3024
3025 /* If we have a conversion to a non-register type force the
3026 use of a VIEW_CONVERT_EXPR instead. */
3027 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
3028 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
3029 TREE_OPERAND (*expr_p, 0));
3030
3031 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
3032 if (TREE_CODE (*expr_p) == CONVERT_EXPR)
3033 TREE_SET_CODE (*expr_p, NOP_EXPR);
3034
3035 return GS_OK;
3036 }
3037
3038 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
3039 DECL_VALUE_EXPR, and it's worth re-examining things. */
3040
3041 static enum gimplify_status
3042 gimplify_var_or_parm_decl (tree *expr_p)
3043 {
3044 tree decl = *expr_p;
3045
3046 /* ??? If this is a local variable, and it has not been seen in any
3047 outer BIND_EXPR, then it's probably the result of a duplicate
3048 declaration, for which we've already issued an error. It would
3049 be really nice if the front end wouldn't leak these at all.
3050 Currently the only known culprit is C++ destructors, as seen
3051 in g++.old-deja/g++.jason/binding.C.
3052 Another possible culprit is size expressions for variably modified
3053 types which are lost in the FE or not gimplified correctly. */
3054 if (VAR_P (decl)
3055 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
3056 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
3057 && decl_function_context (decl) == current_function_decl)
3058 {
3059 gcc_assert (seen_error ());
3060 return GS_ERROR;
3061 }
3062
3063 /* When within an OMP context, notice uses of variables. */
3064 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
3065 return GS_ALL_DONE;
3066
3067 /* If the decl is an alias for another expression, substitute it now. */
3068 if (DECL_HAS_VALUE_EXPR_P (decl))
3069 {
3070 *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
3071 return GS_OK;
3072 }
3073
3074 return GS_ALL_DONE;
3075 }
3076
3077 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
3078
3079 static void
3080 recalculate_side_effects (tree t)
3081 {
3082 enum tree_code code = TREE_CODE (t);
3083 int len = TREE_OPERAND_LENGTH (t);
3084 int i;
3085
3086 switch (TREE_CODE_CLASS (code))
3087 {
3088 case tcc_expression:
3089 switch (code)
3090 {
3091 case INIT_EXPR:
3092 case MODIFY_EXPR:
3093 case VA_ARG_EXPR:
3094 case PREDECREMENT_EXPR:
3095 case PREINCREMENT_EXPR:
3096 case POSTDECREMENT_EXPR:
3097 case POSTINCREMENT_EXPR:
3098 /* All of these have side-effects, no matter what their
3099 operands are. */
3100 return;
3101
3102 default:
3103 break;
3104 }
3105 /* Fall through. */
3106
3107 case tcc_comparison: /* a comparison expression */
3108 case tcc_unary: /* a unary arithmetic expression */
3109 case tcc_binary: /* a binary arithmetic expression */
3110 case tcc_reference: /* a reference */
3111 case tcc_vl_exp: /* a function call */
3112 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
3113 for (i = 0; i < len; ++i)
3114 {
3115 tree op = TREE_OPERAND (t, i);
3116 if (op && TREE_SIDE_EFFECTS (op))
3117 TREE_SIDE_EFFECTS (t) = 1;
3118 }
3119 break;
3120
3121 case tcc_constant:
3122 /* No side-effects. */
3123 return;
3124
3125 default:
3126 gcc_unreachable ();
3127 }
3128 }
3129
3130 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
3131 node *EXPR_P.
3132
3133 compound_lval
3134 : min_lval '[' val ']'
3135 | min_lval '.' ID
3136 | compound_lval '[' val ']'
3137 | compound_lval '.' ID
3138
3139 This is not part of the original SIMPLE definition, which separates
3140 array and member references, but it seems reasonable to handle them
3141 together. Also, this way we don't run into problems with union
3142 aliasing; gcc requires that for accesses through a union to alias, the
3143 union reference must be explicit, which was not always the case when we
3144 were splitting up array and member refs.
3145
3146 PRE_P points to the sequence where side effects that must happen before
3147 *EXPR_P should be stored.
3148
3149 POST_P points to the sequence where side effects that must happen after
3150 *EXPR_P should be stored. */
3151
3152 static enum gimplify_status
3153 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3154 fallback_t fallback)
3155 {
3156 tree *p;
3157 enum gimplify_status ret = GS_ALL_DONE, tret;
3158 int i;
3159 location_t loc = EXPR_LOCATION (*expr_p);
3160 tree expr = *expr_p;
3161
3162 /* Create a stack of the subexpressions so later we can walk them in
3163 order from inner to outer. */
3164 auto_vec<tree, 10> expr_stack;
3165
3166 /* We can handle anything that get_inner_reference can deal with. */
3167 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
3168 {
3169 restart:
3170 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
3171 if (TREE_CODE (*p) == INDIRECT_REF)
3172 *p = fold_indirect_ref_loc (loc, *p);
3173
3174 if (handled_component_p (*p))
3175 ;
3176 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
3177 additional COMPONENT_REFs. */
3178 else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
3179 && gimplify_var_or_parm_decl (p) == GS_OK)
3180 goto restart;
3181 else
3182 break;
3183
3184 expr_stack.safe_push (*p);
3185 }
3186
3187 gcc_assert (expr_stack.length ());
3188
3189 /* Now EXPR_STACK is a stack of pointers to all the refs we've
3190 walked through and P points to the innermost expression.
3191
3192 Java requires that we elaborate nodes in source order. That
3193 means we must gimplify the inner expression followed by each of
3194 the indices, in order. But we can't gimplify the inner
3195 expression until we deal with any variable bounds, sizes, or
3196 positions in order to deal with PLACEHOLDER_EXPRs.
3197
3198 The base expression may contain a statement expression that
3199 has declarations used in size expressions, so has to be
3200 gimplified before gimplifying the size expressions.
3201
3202 So we do this in three steps. First we deal with variable
3203 bounds, sizes, and positions, then we gimplify the base and
3204 ensure it is memory if needed, then we deal with the annotations
3205 for any variables in the components and any indices, from left
3206 to right. */
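/* E.g., for a[i].f with a variably-sized element type: step 1 stores
   the gimplified low bound and element size into operands 2 and 3 of
   the ARRAY_REF, step 2 gimplifies the base A, and step 3 reduces I
   and the stored size expressions to gimple values (a sketch of the
   common case). */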
3207
3208 bool need_non_reg = false;
3209 for (i = expr_stack.length () - 1; i >= 0; i--)
3210 {
3211 tree t = expr_stack[i];
3212
3213 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3214 {
3215 /* Deal with the low bound and element type size and put them into
3216 the ARRAY_REF. If these values are set, they have already been
3217 gimplified. */
3218 if (TREE_OPERAND (t, 2) == NULL_TREE)
3219 {
3220 tree low = unshare_expr (array_ref_low_bound (t));
3221 if (!is_gimple_min_invariant (low))
3222 {
3223 TREE_OPERAND (t, 2) = low;
3224 }
3225 }
3226
3227 if (TREE_OPERAND (t, 3) == NULL_TREE)
3228 {
3229 tree elmt_size = array_ref_element_size (t);
3230 if (!is_gimple_min_invariant (elmt_size))
3231 {
3232 elmt_size = unshare_expr (elmt_size);
3233 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
3234 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
3235
3236 /* Divide the element size by the alignment of the element
3237 type (above). */
3238 elmt_size = size_binop_loc (loc, EXACT_DIV_EXPR,
3239 elmt_size, factor);
3240
3241 TREE_OPERAND (t, 3) = elmt_size;
3242 }
3243 }
3244 need_non_reg = true;
3245 }
3246 else if (TREE_CODE (t) == COMPONENT_REF)
3247 {
3248 /* Set the field offset into T and gimplify it. */
3249 if (TREE_OPERAND (t, 2) == NULL_TREE)
3250 {
3251 tree offset = component_ref_field_offset (t);
3252 if (!is_gimple_min_invariant (offset))
3253 {
3254 offset = unshare_expr (offset);
3255 tree field = TREE_OPERAND (t, 1);
3256 tree factor
3257 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
3258
3259 /* Divide the offset by its alignment. */
3260 offset = size_binop_loc (loc, EXACT_DIV_EXPR,
3261 offset, factor);
3262
3263 TREE_OPERAND (t, 2) = offset;
3264 }
3265 }
3266 need_non_reg = true;
3267 }
3268 }
3269
3270 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
3271 so as to match the min_lval predicate. Failure to do so may result
3272 in the creation of large aggregate temporaries. */
3273 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
3274 fallback | fb_lvalue);
3275 ret = MIN (ret, tret);
3276
3277 /* Step 2a: if we have component references we do not support on
3278 registers then make sure the base isn't a register. Of course
3279 we can only do so if an rvalue is OK. */
3280 if (need_non_reg && (fallback & fb_rvalue))
3281 prepare_gimple_addressable (p, pre_p);
3282
3283 /* Step 3: gimplify size expressions and the indices and operands of
3284 ARRAY_REF. During this loop we also remove any useless conversions. */
3285
3286 for (; expr_stack.length () > 0; )
3287 {
3288 tree t = expr_stack.pop ();
3289
3290 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3291 {
3292 /* Gimplify the low bound and element type size. */
3293 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
3294 is_gimple_reg, fb_rvalue);
3295 ret = MIN (ret, tret);
3296
3297 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
3298 is_gimple_reg, fb_rvalue);
3299 ret = MIN (ret, tret);
3300
3301 /* Gimplify the dimension. */
3302 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
3303 is_gimple_val, fb_rvalue);
3304 ret = MIN (ret, tret);
3305 }
3306 else if (TREE_CODE (t) == COMPONENT_REF)
3307 {
3308 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
3309 is_gimple_reg, fb_rvalue);
3310 ret = MIN (ret, tret);
3311 }
3312
3313 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
3314
3315 /* The innermost expression P may have originally had
3316 TREE_SIDE_EFFECTS set which would have caused all the outer
3317 expressions in *EXPR_P leading to P to also have had
3318 TREE_SIDE_EFFECTS set. */
3319 recalculate_side_effects (t);
3320 }
3321
3322 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
3323 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
3324 {
3325 canonicalize_component_ref (expr_p);
3326 }
3327
3328 expr_stack.release ();
3329
3330 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
3331
3332 return ret;
3333 }
3334
3335 /* Gimplify the self modifying expression pointed to by EXPR_P
3336 (++, --, +=, -=).
3337
3338 PRE_P points to the list where side effects that must happen before
3339 *EXPR_P should be stored.
3340
3341 POST_P points to the list where side effects that must happen after
3342 *EXPR_P should be stored.
3343
3344 WANT_VALUE is nonzero iff we want to use the value of this expression
3345 in another expression.
3346
3347 ARITH_TYPE is the type the computation should be performed in. */
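/* For example, "x = i++;" with WANT_VALUE set gimplifies to roughly
   i.0 = i;
   i = i.0 + 1;
   x = i.0;
   so the postfix value survives the store back to I (I.0 is an
   illustrative temporary name). */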
3348
3349 enum gimplify_status
3350 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3351 bool want_value, tree arith_type)
3352 {
3353 enum tree_code code;
3354 tree lhs, lvalue, rhs, t1;
3355 gimple_seq post = NULL, *orig_post_p = post_p;
3356 bool postfix;
3357 enum tree_code arith_code;
3358 enum gimplify_status ret;
3359 location_t loc = EXPR_LOCATION (*expr_p);
3360
3361 code = TREE_CODE (*expr_p);
3362
3363 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
3364 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
3365
3366 /* Prefix or postfix? */
3367 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
3368 /* Faster to treat as prefix if result is not used. */
3369 postfix = want_value;
3370 else
3371 postfix = false;
3372
3373 /* For postfix, make sure the inner expression's post side effects
3374 are executed after side effects from this expression. */
3375 if (postfix)
3376 post_p = &post;
3377
3378 /* Add or subtract? */
3379 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
3380 arith_code = PLUS_EXPR;
3381 else
3382 arith_code = MINUS_EXPR;
3383
3384 /* Gimplify the LHS into a GIMPLE lvalue. */
3385 lvalue = TREE_OPERAND (*expr_p, 0);
3386 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
3387 if (ret == GS_ERROR)
3388 return ret;
3389
3390 /* Extract the operands to the arithmetic operation. */
3391 lhs = lvalue;
3392 rhs = TREE_OPERAND (*expr_p, 1);
3393
3394 /* For postfix operator, we evaluate the LHS to an rvalue and then use
3395 that as the result value and in the postqueue operation. */
3396 if (postfix)
3397 {
3398 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
3399 if (ret == GS_ERROR)
3400 return ret;
3401
3402 lhs = get_initialized_tmp_var (lhs, pre_p);
3403 }
3404
3405 /* For pointer increment and decrement, use POINTER_PLUS_EXPR. */
3406 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
3407 {
3408 rhs = convert_to_ptrofftype_loc (loc, rhs);
3409 if (arith_code == MINUS_EXPR)
3410 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
3411 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
3412 }
3413 else
3414 t1 = fold_convert (TREE_TYPE (*expr_p),
3415 fold_build2 (arith_code, arith_type,
3416 fold_convert (arith_type, lhs),
3417 fold_convert (arith_type, rhs)));
3418
3419 if (postfix)
3420 {
3421 gimplify_assign (lvalue, t1, pre_p);
3422 gimplify_seq_add_seq (orig_post_p, post);
3423 *expr_p = lhs;
3424 return GS_ALL_DONE;
3425 }
3426 else
3427 {
3428 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
3429 return GS_OK;
3430 }
3431 }
3432
3433 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
3434
3435 static void
3436 maybe_with_size_expr (tree *expr_p)
3437 {
3438 tree expr = *expr_p;
3439 tree type = TREE_TYPE (expr);
3440 tree size;
3441
3442 /* If we've already wrapped this or the type is error_mark_node, we can't do
3443 anything. */
3444 if (TREE_CODE (expr) == WITH_SIZE_EXPR
3445 || type == error_mark_node)
3446 return;
3447
3448 /* If the size isn't known or is a constant, we have nothing to do. */
3449 size = TYPE_SIZE_UNIT (type);
3450 if (!size || poly_int_tree_p (size))
3451 return;
3452
3453 /* Otherwise, make a WITH_SIZE_EXPR. */
3454 size = unshare_expr (size);
3455 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
3456 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
3457 }
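
/* Editor's note, for illustration: this matters for objects whose size is
   only known at run time (e.g. a C variable-length array, or a Fortran
   character value of run-time length).  Their TYPE_SIZE_UNIT is an
   expression rather than a constant, so the value is wrapped as
   WITH_SIZE_EXPR <expr, size> and later lowering (for instance to a
   memcpy call) can still recover the size.  */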
3458
3459 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P.
3460 Store any side-effects in PRE_P. CALL_LOCATION is the location of
3461 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3462 gimplified to an SSA name. */
3463
3464 enum gimplify_status
3465 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
3466 bool allow_ssa)
3467 {
3468 bool (*test) (tree);
3469 fallback_t fb;
3470
3471 /* In general, we allow lvalues for function arguments to avoid
3472 extra overhead of copying large aggregates out of even larger
3473 aggregates into temporaries only to copy the temporaries to
3474 the argument list. Make optimizers happy by pulling out to
3475 temporaries those types that fit in registers. */
3476 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
3477 test = is_gimple_val, fb = fb_rvalue;
3478 else
3479 {
3480 test = is_gimple_lvalue, fb = fb_either;
3481 /* Also strip a TARGET_EXPR that would force an extra copy. */
3482 if (TREE_CODE (*arg_p) == TARGET_EXPR)
3483 {
3484 tree init = TARGET_EXPR_INITIAL (*arg_p);
3485 if (init
3486 && !VOID_TYPE_P (TREE_TYPE (init)))
3487 *arg_p = init;
3488 }
3489 }
3490
3491 /* If this is a variable sized type, we must remember the size. */
3492 maybe_with_size_expr (arg_p);
3493
3494 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3495 /* Make sure arguments have the same location as the function call
3496 itself. */
3497 protected_set_expr_location (*arg_p, call_location);
3498
3499 /* There is a sequence point before a function call. Side effects in
3500 the argument list must occur before the actual call. So, when
3501 gimplifying arguments, force gimplify_expr to use an internal
3502 post queue which is then appended to the end of PRE_P. */
3503 return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
3504 }
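
/* For illustration (editor's sketch): a register-sized argument such as
   "use_int (i + 1)" is reduced to a value, roughly "D.1 = i + 1;
   use_int (D.1);", while a large aggregate argument is deliberately left
   as an lvalue so it is not copied through an extra temporary.  */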
3505
3506 /* Don't fold inside offloading or taskreg regions: it can break code by
3507 adding decl references that weren't in the source. We'll do it during
3508 omplower pass instead. */
3509
3510 static bool
3511 maybe_fold_stmt (gimple_stmt_iterator *gsi)
3512 {
3513 struct gimplify_omp_ctx *ctx;
3514 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
3515 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
3516 return false;
3517 else if ((ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
3518 return false;
3519 /* Delay folding of builtins until the IL is in a consistent state
3520 so the diagnostic machinery can do a better job. */
3521 if (gimple_call_builtin_p (gsi_stmt (*gsi)))
3522 return false;
3523 return fold_stmt (gsi);
3524 }
3525
3526 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3527 WANT_VALUE is true if the result of the call is desired. */
3528
3529 static enum gimplify_status
3530 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
3531 {
3532 tree fndecl, parms, p, fnptrtype;
3533 enum gimplify_status ret;
3534 int i, nargs;
3535 gcall *call;
3536 bool builtin_va_start_p = false;
3537 location_t loc = EXPR_LOCATION (*expr_p);
3538
3539 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
3540
3541 /* For reliable diagnostics during inlining, it is necessary that
3542 every call_expr be annotated with file and line. */
3543 if (! EXPR_HAS_LOCATION (*expr_p))
3544 SET_EXPR_LOCATION (*expr_p, input_location);
3545
3546 /* Gimplify internal functions created in the FEs. */
3547 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
3548 {
3549 if (want_value)
3550 return GS_ALL_DONE;
3551
3552 nargs = call_expr_nargs (*expr_p);
3553 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
3554 auto_vec<tree> vargs (nargs);
3555
3556 for (i = 0; i < nargs; i++)
3557 {
3558 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3559 EXPR_LOCATION (*expr_p));
3560 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
3561 }
3562
3563 gcall *call = gimple_build_call_internal_vec (ifn, vargs);
3564 gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p));
3565 gimplify_seq_add_stmt (pre_p, call);
3566 return GS_ALL_DONE;
3567 }
3568
3569 /* This may be a call to a builtin function.
3570
3571 Builtin function calls may be transformed into different
3572 (and more efficient) builtin function calls under certain
3573 circumstances. Unfortunately, gimplification can muck things
3574 up enough that the builtin expanders are not aware that certain
3575 transformations are still valid.
3576
3577 So we attempt transformation/gimplification of the call before
3578 we gimplify the CALL_EXPR. At this time we do not manage to
3579 transform all calls in the same manner as the expanders do, but
3580 we do transform most of them. */
3581 fndecl = get_callee_fndecl (*expr_p);
3582 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
3583 switch (DECL_FUNCTION_CODE (fndecl))
3584 {
3585 CASE_BUILT_IN_ALLOCA:
3586 /* If the call has been built for a variable-sized object, then we
3587 want to restore the stack level when the enclosing BIND_EXPR is
3588 exited to reclaim the allocated space; otherwise, we precisely
3589 need to do the opposite and preserve the latest stack level. */
3590 if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
3591 gimplify_ctxp->save_stack = true;
3592 else
3593 gimplify_ctxp->keep_stack = true;
3594 break;
3595
3596 case BUILT_IN_VA_START:
3597 {
3598 builtin_va_start_p = true;
3599 if (call_expr_nargs (*expr_p) < 2)
3600 {
3601 error ("too few arguments to function %<va_start%>");
3602 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3603 return GS_OK;
3604 }
3605
3606 if (fold_builtin_next_arg (*expr_p, true))
3607 {
3608 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3609 return GS_OK;
3610 }
3611 break;
3612 }
3613
3614 case BUILT_IN_EH_RETURN:
3615 cfun->calls_eh_return = true;
3616 break;
3617
3618 case BUILT_IN_CLEAR_PADDING:
3619 if (call_expr_nargs (*expr_p) == 1)
3620 {
3621 /* Remember the original type of the argument in an internal
3622 dummy second argument, since pointer conversions are useless
3623 in GIMPLE. Also mark this call as not for automatic
3624 initialization in the internal dummy third argument. */
3625 p = CALL_EXPR_ARG (*expr_p, 0);
3626 *expr_p
3627 = build_call_expr_loc (EXPR_LOCATION (*expr_p), fndecl, 2, p,
3628 build_zero_cst (TREE_TYPE (p)));
3629 return GS_OK;
3630 }
3631 break;
3632
3633 default:
3634 ;
3635 }
3636 if (fndecl && fndecl_built_in_p (fndecl))
3637 {
3638 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3639 if (new_tree && new_tree != *expr_p)
3640 {
3641 /* There was a transformation of this call which computes the
3642 same value, but in a more efficient way. Return and try
3643 again. */
3644 *expr_p = new_tree;
3645 return GS_OK;
3646 }
3647 }
3648
3649 /* Remember the original function pointer type. */
3650 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
3651
3652 if (flag_openmp
3653 && fndecl
3654 && cfun
3655 && (cfun->curr_properties & PROP_gimple_any) == 0)
3656 {
3657 tree variant = omp_resolve_declare_variant (fndecl);
3658 if (variant != fndecl)
3659 CALL_EXPR_FN (*expr_p) = build1 (ADDR_EXPR, fnptrtype, variant);
3660 }
3661
3662 /* There is a sequence point before the call, so any side effects in
3663 the calling expression must occur before the actual call. Force
3664 gimplify_expr to use an internal post queue. */
3665 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
3666 is_gimple_call_addr, fb_rvalue);
3667
3668 nargs = call_expr_nargs (*expr_p);
3669
3670 /* Get argument types for verification. */
3671 fndecl = get_callee_fndecl (*expr_p);
3672 parms = NULL_TREE;
3673 if (fndecl)
3674 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3675 else
3676 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
3677
3678 if (fndecl && DECL_ARGUMENTS (fndecl))
3679 p = DECL_ARGUMENTS (fndecl);
3680 else if (parms)
3681 p = parms;
3682 else
3683 p = NULL_TREE;
3684 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
3685 ;
3686
3687 /* If the last argument is __builtin_va_arg_pack () and it is not
3688 passed as a named argument, decrease the number of CALL_EXPR
3689 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3690 if (!p
3691 && i < nargs
3692 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
3693 {
3694 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
3695 tree last_arg_fndecl = get_callee_fndecl (last_arg);
3696
3697 if (last_arg_fndecl
3698 && fndecl_built_in_p (last_arg_fndecl, BUILT_IN_VA_ARG_PACK))
3699 {
3700 tree call = *expr_p;
3701
3702 --nargs;
3703 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
3704 CALL_EXPR_FN (call),
3705 nargs, CALL_EXPR_ARGP (call));
3706
3707 /* Copy all CALL_EXPR flags, location and block, except
3708 CALL_EXPR_VA_ARG_PACK flag. */
3709 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
3710 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
3711 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
3712 = CALL_EXPR_RETURN_SLOT_OPT (call);
3713 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
3714 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
3715
3716 /* Set CALL_EXPR_VA_ARG_PACK. */
3717 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
3718 }
3719 }
3720
3721 /* If the call returns twice then after building the CFG the call
3722 argument computations will no longer dominate the call because
3723 we add an abnormal incoming edge to the call. So do not use SSA
3724 vars there. */
3725 bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;
3726
3727 /* Gimplify the function arguments. */
3728 if (nargs > 0)
3729 {
3730 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
3731 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
3732 PUSH_ARGS_REVERSED ? i-- : i++)
3733 {
3734 enum gimplify_status t;
3735
3736 /* Avoid gimplifying the second argument to va_start, which needs to
3737 be the plain PARM_DECL. */
3738 if ((i != 1) || !builtin_va_start_p)
3739 {
3740 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3741 EXPR_LOCATION (*expr_p), ! returns_twice);
3742
3743 if (t == GS_ERROR)
3744 ret = GS_ERROR;
3745 }
3746 }
3747 }
3748
3749 /* Gimplify the static chain. */
3750 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
3751 {
3752 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
3753 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
3754 else
3755 {
3756 enum gimplify_status t;
3757 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
3758 EXPR_LOCATION (*expr_p), ! returns_twice);
3759 if (t == GS_ERROR)
3760 ret = GS_ERROR;
3761 }
3762 }
3763
3764 /* Verify the function result. */
3765 if (want_value && fndecl
3766 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
3767 {
3768 error_at (loc, "using result of function returning %<void%>");
3769 ret = GS_ERROR;
3770 }
3771
3772 /* Try this again in case gimplification exposed something. */
3773 if (ret != GS_ERROR)
3774 {
3775 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3776
3777 if (new_tree && new_tree != *expr_p)
3778 {
3779 /* There was a transformation of this call which computes the
3780 same value, but in a more efficient way. Return and try
3781 again. */
3782 *expr_p = new_tree;
3783 return GS_OK;
3784 }
3785 }
3786 else
3787 {
3788 *expr_p = error_mark_node;
3789 return GS_ERROR;
3790 }
3791
3792 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
3793 decl. This allows us to eliminate redundant or useless
3794 calls to "const" functions. */
3795 if (TREE_CODE (*expr_p) == CALL_EXPR)
3796 {
3797 int flags = call_expr_flags (*expr_p);
3798 if (flags & (ECF_CONST | ECF_PURE)
3799 /* An infinite loop is considered a side effect. */
3800 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
3801 TREE_SIDE_EFFECTS (*expr_p) = 0;
3802 }
3803
3804 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
3805 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
3806 form and delegate the creation of a GIMPLE_CALL to
3807 gimplify_modify_expr. This is always possible because when
3808 WANT_VALUE is true, the caller wants the result of this call into
3809 a temporary, which means that we will emit an INIT_EXPR in
3810 internal_get_tmp_var which will then be handled by
3811 gimplify_modify_expr. */
3812 if (!want_value)
3813 {
3814 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
3815 have to do is replicate it as a GIMPLE_CALL tuple. */
3816 gimple_stmt_iterator gsi;
3817 call = gimple_build_call_from_tree (*expr_p, fnptrtype);
3818 notice_special_calls (call);
3819 gimplify_seq_add_stmt (pre_p, call);
3820 gsi = gsi_last (*pre_p);
3821 maybe_fold_stmt (&gsi);
3822 *expr_p = NULL_TREE;
3823 }
3824 else
3825 /* Remember the original function type. */
3826 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
3827 CALL_EXPR_FN (*expr_p));
3828
3829 return ret;
3830 }
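
/* Editor's sketch of the overall effect: in void context "foo (a + b, c);"
   is lowered directly to a GIMPLE_CALL, with argument side effects on the
   prequeue:

       D.1 = a + b;
       foo (D.1, c);

   When the value is wanted, the CALL_EXPR is instead left in place and
   gimplify_modify_expr later builds the GIMPLE_CALL with an LHS.  */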
3831
3832 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
3833 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
3834
3835 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
3836 condition is true or false, respectively. If null, we should generate
3837 our own to skip over the evaluation of this specific expression.
3838
3839 LOCUS is the source location of the COND_EXPR.
3840
3841 This function is the tree equivalent of do_jump.
3842
3843 shortcut_cond_r should only be called by shortcut_cond_expr. */
3844
3845 static tree
3846 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
3847 location_t locus)
3848 {
3849 tree local_label = NULL_TREE;
3850 tree t, expr = NULL;
3851
3852 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
3853 retain the shortcut semantics. Just insert the gotos here;
3854 shortcut_cond_expr will append the real blocks later. */
3855 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3856 {
3857 location_t new_locus;
3858
3859 /* Turn if (a && b) into
3860
3861 if (a); else goto no;
3862 if (b) goto yes; else goto no;
3863 (no:) */
3864
3865 if (false_label_p == NULL)
3866 false_label_p = &local_label;
3867
3868 /* Keep the original source location on the first 'if'. */
3869 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
3870 append_to_statement_list (t, &expr);
3871
3872 /* Set the source location of the && on the second 'if'. */
3873 new_locus = rexpr_location (pred, locus);
3874 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3875 new_locus);
3876 append_to_statement_list (t, &expr);
3877 }
3878 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3879 {
3880 location_t new_locus;
3881
3882 /* Turn if (a || b) into
3883
3884 if (a) goto yes;
3885 if (b) goto yes; else goto no;
3886 (yes:) */
3887
3888 if (true_label_p == NULL)
3889 true_label_p = &local_label;
3890
3891 /* Keep the original source location on the first 'if'. */
3892 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
3893 append_to_statement_list (t, &expr);
3894
3895 /* Set the source location of the || on the second 'if'. */
3896 new_locus = rexpr_location (pred, locus);
3897 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3898 new_locus);
3899 append_to_statement_list (t, &expr);
3900 }
3901 else if (TREE_CODE (pred) == COND_EXPR
3902 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
3903 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
3904 {
3905 location_t new_locus;
3906
3907 /* As long as we're messing with gotos, turn if (a ? b : c) into
3908    if (a)
3909      if (b) goto yes; else goto no;
3910    else
3911      if (c) goto yes; else goto no;
3912
3913 Don't do this if one of the arms has void type, which can happen
3914 in C++ when the arm is throw. */
3915
3916 /* Keep the original source location on the first 'if'. Set the source
3917 location of the ? on the second 'if'. */
3918 new_locus = rexpr_location (pred, locus);
3919 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
3920 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
3921 false_label_p, locus),
3922 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
3923 false_label_p, new_locus));
3924 }
3925 else
3926 {
3927 expr = build3 (COND_EXPR, void_type_node, pred,
3928 build_and_jump (true_label_p),
3929 build_and_jump (false_label_p));
3930 SET_EXPR_LOCATION (expr, locus);
3931 }
3932
3933 if (local_label)
3934 {
3935 t = build1 (LABEL_EXPR, void_type_node, local_label);
3936 append_to_statement_list (t, &expr);
3937 }
3938
3939 return expr;
3940 }
3941
3942 /* If EXPR is a GOTO_EXPR, return it. If it is a STATEMENT_LIST, skip
3943 any of its leading DEBUG_BEGIN_STMTS and recurse on the subsequent
3944 statement, if it is the last one. Otherwise, return NULL. */
3945
3946 static tree
3947 find_goto (tree expr)
3948 {
3949 if (!expr)
3950 return NULL_TREE;
3951
3952 if (TREE_CODE (expr) == GOTO_EXPR)
3953 return expr;
3954
3955 if (TREE_CODE (expr) != STATEMENT_LIST)
3956 return NULL_TREE;
3957
3958 tree_stmt_iterator i = tsi_start (expr);
3959
3960 while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
3961 tsi_next (&i);
3962
3963 if (!tsi_one_before_end_p (i))
3964 return NULL_TREE;
3965
3966 return find_goto (tsi_stmt (i));
3967 }
3968
3969 /* Same as find_goto, except that it returns NULL if the destination
3970 is not a LABEL_DECL. */
3971
3972 static inline tree
3973 find_goto_label (tree expr)
3974 {
3975 tree dest = find_goto (expr);
3976 if (dest && TREE_CODE (GOTO_DESTINATION (dest)) == LABEL_DECL)
3977 return dest;
3978 return NULL_TREE;
3979 }
3980
3981 /* Given a conditional expression EXPR with short-circuit boolean
3982 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
3983 predicate apart into the equivalent sequence of conditionals. */
3984
3985 static tree
3986 shortcut_cond_expr (tree expr)
3987 {
3988 tree pred = TREE_OPERAND (expr, 0);
3989 tree then_ = TREE_OPERAND (expr, 1);
3990 tree else_ = TREE_OPERAND (expr, 2);
3991 tree true_label, false_label, end_label, t;
3992 tree *true_label_p;
3993 tree *false_label_p;
3994 bool emit_end, emit_false, jump_over_else;
3995 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
3996 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
3997
3998 /* First do simple transformations. */
3999 if (!else_se)
4000 {
4001 /* If there is no 'else', turn
4002 if (a && b) then c
4003 into
4004 if (a) if (b) then c. */
4005 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
4006 {
4007 /* Keep the original source location on the first 'if'. */
4008 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
4009 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
4010 /* Set the source location of the && on the second 'if'. */
4011 if (rexpr_has_location (pred))
4012 SET_EXPR_LOCATION (expr, rexpr_location (pred));
4013 then_ = shortcut_cond_expr (expr);
4014 then_se = then_ && TREE_SIDE_EFFECTS (then_);
4015 pred = TREE_OPERAND (pred, 0);
4016 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
4017 SET_EXPR_LOCATION (expr, locus);
4018 }
4019 }
4020
4021 if (!then_se)
4022 {
4023 /* If there is no 'then', turn
4024 if (a || b); else d
4025 into
4026 if (a); else if (b); else d. */
4027 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
4028 {
4029 /* Keep the original source location on the first 'if'. */
4030 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
4031 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
4032 /* Set the source location of the || on the second 'if'. */
4033 if (rexpr_has_location (pred))
4034 SET_EXPR_LOCATION (expr, rexpr_location (pred));
4035 else_ = shortcut_cond_expr (expr);
4036 else_se = else_ && TREE_SIDE_EFFECTS (else_);
4037 pred = TREE_OPERAND (pred, 0);
4038 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
4039 SET_EXPR_LOCATION (expr, locus);
4040 }
4041 }
4042
4043 /* If we're done, great. */
4044 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
4045 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
4046 return expr;
4047
4048 /* Otherwise we need to mess with gotos. Change
4049 if (a) c; else d;
4050 to
4051 if (a); else goto no;
4052 c; goto end;
4053 no: d; end:
4054 and recursively gimplify the condition. */
4055
4056 true_label = false_label = end_label = NULL_TREE;
4057
4058 /* If our arms just jump somewhere, hijack those labels so we don't
4059 generate jumps to jumps. */
4060
4061 if (tree then_goto = find_goto_label (then_))
4062 {
4063 true_label = GOTO_DESTINATION (then_goto);
4064 then_ = NULL;
4065 then_se = false;
4066 }
4067
4068 if (tree else_goto = find_goto_label (else_))
4069 {
4070 false_label = GOTO_DESTINATION (else_goto);
4071 else_ = NULL;
4072 else_se = false;
4073 }
4074
4075 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
4076 if (true_label)
4077 true_label_p = &true_label;
4078 else
4079 true_label_p = NULL;
4080
4081 /* The 'else' branch also needs a label if it contains interesting code. */
4082 if (false_label || else_se)
4083 false_label_p = &false_label;
4084 else
4085 false_label_p = NULL;
4086
4087 /* If there was nothing else in our arms, just forward the label(s). */
4088 if (!then_se && !else_se)
4089 return shortcut_cond_r (pred, true_label_p, false_label_p,
4090 EXPR_LOC_OR_LOC (expr, input_location));
4091
4092 /* If our last subexpression already has a terminal label, reuse it. */
4093 if (else_se)
4094 t = expr_last (else_);
4095 else if (then_se)
4096 t = expr_last (then_);
4097 else
4098 t = NULL;
4099 if (t && TREE_CODE (t) == LABEL_EXPR)
4100 end_label = LABEL_EXPR_LABEL (t);
4101
4102 /* If we don't care about jumping to the 'else' branch, jump to the end
4103 if the condition is false. */
4104 if (!false_label_p)
4105 false_label_p = &end_label;
4106
4107 /* We only want to emit these labels if we aren't hijacking them. */
4108 emit_end = (end_label == NULL_TREE);
4109 emit_false = (false_label == NULL_TREE);
4110
4111 /* We only emit the jump over the else clause if we have to--if the
4112 then clause may fall through. Otherwise we can wind up with a
4113 useless jump and a useless label at the end of gimplified code,
4114 which will cause us to think that this conditional as a whole
4115 falls through even if it doesn't. If we then inline a function
4116 which ends with such a condition, that can cause us to issue an
4117 inappropriate warning about control reaching the end of a
4118 non-void function. */
4119 jump_over_else = block_may_fallthru (then_);
4120
4121 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
4122 EXPR_LOC_OR_LOC (expr, input_location));
4123
4124 expr = NULL;
4125 append_to_statement_list (pred, &expr);
4126
4127 append_to_statement_list (then_, &expr);
4128 if (else_se)
4129 {
4130 if (jump_over_else)
4131 {
4132 tree last = expr_last (expr);
4133 t = build_and_jump (&end_label);
4134 if (rexpr_has_location (last))
4135 SET_EXPR_LOCATION (t, rexpr_location (last));
4136 append_to_statement_list (t, &expr);
4137 }
4138 if (emit_false)
4139 {
4140 t = build1 (LABEL_EXPR, void_type_node, false_label);
4141 append_to_statement_list (t, &expr);
4142 }
4143 append_to_statement_list (else_, &expr);
4144 }
4145 if (emit_end && end_label)
4146 {
4147 t = build1 (LABEL_EXPR, void_type_node, end_label);
4148 append_to_statement_list (t, &expr);
4149 }
4150
4151 return expr;
4152 }
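
/* Editor's illustration of the full rewrite: "if (a && b) x (); else y ();"
   comes out roughly as

       if (a) ; else goto no;
       if (b) ; else goto no;
       x ();
       goto end;
     no:
       y ();
     end:

   shortcut_cond_r emits the conditional gotos, and this function appends
   the real arms and the labels.  */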
4153
4154 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
4155
4156 tree
4157 gimple_boolify (tree expr)
4158 {
4159 tree type = TREE_TYPE (expr);
4160 location_t loc = EXPR_LOCATION (expr);
4161
4162 if (TREE_CODE (expr) == NE_EXPR
4163 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
4164 && integer_zerop (TREE_OPERAND (expr, 1)))
4165 {
4166 tree call = TREE_OPERAND (expr, 0);
4167 tree fn = get_callee_fndecl (call);
4168
4169 /* For __builtin_expect ((long) (x), y) recurse into x as well
4170 if x is truth_value_p. */
4171 if (fn
4172 && fndecl_built_in_p (fn, BUILT_IN_EXPECT)
4173 && call_expr_nargs (call) == 2)
4174 {
4175 tree arg = CALL_EXPR_ARG (call, 0);
4176 if (arg)
4177 {
4178 if (TREE_CODE (arg) == NOP_EXPR
4179 && TREE_TYPE (arg) == TREE_TYPE (call))
4180 arg = TREE_OPERAND (arg, 0);
4181 if (truth_value_p (TREE_CODE (arg)))
4182 {
4183 arg = gimple_boolify (arg);
4184 CALL_EXPR_ARG (call, 0)
4185 = fold_convert_loc (loc, TREE_TYPE (call), arg);
4186 }
4187 }
4188 }
4189 }
4190
4191 switch (TREE_CODE (expr))
4192 {
4193 case TRUTH_AND_EXPR:
4194 case TRUTH_OR_EXPR:
4195 case TRUTH_XOR_EXPR:
4196 case TRUTH_ANDIF_EXPR:
4197 case TRUTH_ORIF_EXPR:
4198 /* Also boolify the arguments of truth exprs. */
4199 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
4200 /* FALLTHRU */
4201
4202 case TRUTH_NOT_EXPR:
4203 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4204
4205 /* These expressions always produce boolean results. */
4206 if (TREE_CODE (type) != BOOLEAN_TYPE)
4207 TREE_TYPE (expr) = boolean_type_node;
4208 return expr;
4209
4210 case ANNOTATE_EXPR:
4211 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
4212 {
4213 case annot_expr_ivdep_kind:
4214 case annot_expr_unroll_kind:
4215 case annot_expr_no_vector_kind:
4216 case annot_expr_vector_kind:
4217 case annot_expr_parallel_kind:
4218 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4219 if (TREE_CODE (type) != BOOLEAN_TYPE)
4220 TREE_TYPE (expr) = boolean_type_node;
4221 return expr;
4222 default:
4223 gcc_unreachable ();
4224 }
4225
4226 default:
4227 if (COMPARISON_CLASS_P (expr))
4228 {
4229 /* These expressions always produce boolean results. */
4230 if (TREE_CODE (type) != BOOLEAN_TYPE)
4231 TREE_TYPE (expr) = boolean_type_node;
4232 return expr;
4233 }
4234 /* Other expressions that get here must have boolean values, but
4235 might need to be converted to the appropriate mode. */
4236 if (TREE_CODE (type) == BOOLEAN_TYPE)
4237 return expr;
4238 return fold_convert_loc (loc, boolean_type_node, expr);
4239 }
4240 }
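
/* For illustration (editor's note): a comparison like "a < b" merely has
   its type retargeted to boolean_type_node, whereas a non-truth operand
   such as "x & 4" used as a condition is converted, effectively yielding
   (bool) (x & 4).  */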
4241
4242 /* Given a conditional expression *EXPR_P without side effects, gimplify
4243 its operands. New statements are inserted to PRE_P. */
4244
4245 static enum gimplify_status
4246 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
4247 {
4248 tree expr = *expr_p, cond;
4249 enum gimplify_status ret, tret;
4250 enum tree_code code;
4251
4252 cond = gimple_boolify (COND_EXPR_COND (expr));
4253
4254 /* We need to handle && and || specially, as their gimplification
4255 creates pure cond_exprs and would otherwise lead to an infinite cycle. */
4256 code = TREE_CODE (cond);
4257 if (code == TRUTH_ANDIF_EXPR)
4258 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
4259 else if (code == TRUTH_ORIF_EXPR)
4260 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
4261 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
4262 COND_EXPR_COND (*expr_p) = cond;
4263
4264 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
4265 is_gimple_val, fb_rvalue);
4266 ret = MIN (ret, tret);
4267 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
4268 is_gimple_val, fb_rvalue);
4269
4270 return MIN (ret, tret);
4271 }
4272
4273 /* Return true if evaluating EXPR could trap.
4274 EXPR is GENERIC, while tree_could_trap_p can be called
4275 only on GIMPLE. */
4276
4277 bool
4278 generic_expr_could_trap_p (tree expr)
4279 {
4280 unsigned i, n;
4281
4282 if (!expr || is_gimple_val (expr))
4283 return false;
4284
4285 if (!EXPR_P (expr) || tree_could_trap_p (expr))
4286 return true;
4287
4288 n = TREE_OPERAND_LENGTH (expr);
4289 for (i = 0; i < n; i++)
4290 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
4291 return true;
4292
4293 return false;
4294 }
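
/* Editor's note, for illustration: a dereference "*p" or a division
   "a / b" can trap, so any expression containing one reports true here,
   while bare decls and constants (is_gimple_val) are trivially safe.  */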
4295
4296 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
4297 into
4298
4299      if (p)                    if (p)
4300        t1 = a;                   a;
4301      else             or       else
4302        t1 = b;                   b;
4303      t1;
4304
4305 The second form is used when *EXPR_P is of type void.
4306
4307 PRE_P points to the list where side effects that must happen before
4308 *EXPR_P should be stored. */
4309
4310 static enum gimplify_status
4311 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
4312 {
4313 tree expr = *expr_p;
4314 tree type = TREE_TYPE (expr);
4315 location_t loc = EXPR_LOCATION (expr);
4316 tree tmp, arm1, arm2;
4317 enum gimplify_status ret;
4318 tree label_true, label_false, label_cont;
4319 bool have_then_clause_p, have_else_clause_p;
4320 gcond *cond_stmt;
4321 enum tree_code pred_code;
4322 gimple_seq seq = NULL;
4323
4324 /* If this COND_EXPR has a value, copy the values into a temporary within
4325 the arms. */
4326 if (!VOID_TYPE_P (type))
4327 {
4328 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
4329 tree result;
4330
4331 /* If either an rvalue is ok or we do not require an lvalue, create the
4332 temporary. But we cannot do that if the type is addressable. */
4333 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
4334 && !TREE_ADDRESSABLE (type))
4335 {
4336 if (gimplify_ctxp->allow_rhs_cond_expr
4337 /* If either branch has side effects or could trap, it can't be
4338 evaluated unconditionally. */
4339 && !TREE_SIDE_EFFECTS (then_)
4340 && !generic_expr_could_trap_p (then_)
4341 && !TREE_SIDE_EFFECTS (else_)
4342 && !generic_expr_could_trap_p (else_))
4343 return gimplify_pure_cond_expr (expr_p, pre_p);
4344
4345 tmp = create_tmp_var (type, "iftmp");
4346 result = tmp;
4347 }
4348
4349 /* Otherwise, only create and copy references to the values. */
4350 else
4351 {
4352 type = build_pointer_type (type);
4353
4354 if (!VOID_TYPE_P (TREE_TYPE (then_)))
4355 then_ = build_fold_addr_expr_loc (loc, then_);
4356
4357 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4358 else_ = build_fold_addr_expr_loc (loc, else_);
4359
4360 expr
4361 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
4362
4363 tmp = create_tmp_var (type, "iftmp");
4364 result = build_simple_mem_ref_loc (loc, tmp);
4365 }
4366
4367 /* Build the new then clause, `tmp = then_;'. But don't build the
4368 assignment if the value is void; in C++ that can happen when the arm is a throw. */
4369 if (!VOID_TYPE_P (TREE_TYPE (then_)))
4370 TREE_OPERAND (expr, 1) = build2 (INIT_EXPR, type, tmp, then_);
4371
4372 /* Similarly, build the new else clause, `tmp = else_;'. */
4373 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4374 TREE_OPERAND (expr, 2) = build2 (INIT_EXPR, type, tmp, else_);
4375
4376 TREE_TYPE (expr) = void_type_node;
4377 recalculate_side_effects (expr);
4378
4379 /* Move the COND_EXPR to the prequeue. */
4380 gimplify_stmt (&expr, pre_p);
4381
4382 *expr_p = result;
4383 return GS_ALL_DONE;
4384 }
4385
4386 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
4387 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
4388 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
4389 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
4390
4391 /* Make sure the condition has BOOLEAN_TYPE. */
4392 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4393
4394 /* Break apart && and || conditions. */
4395 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
4396 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
4397 {
4398 expr = shortcut_cond_expr (expr);
4399
4400 if (expr != *expr_p)
4401 {
4402 *expr_p = expr;
4403
4404 /* We can't rely on gimplify_expr to re-gimplify the expanded
4405 form properly, as cleanups might cause the target labels to be
4406 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
4407 set up a conditional context. */
4408 gimple_push_condition ();
4409 gimplify_stmt (expr_p, &seq);
4410 gimple_pop_condition (pre_p);
4411 gimple_seq_add_seq (pre_p, seq);
4412
4413 return GS_ALL_DONE;
4414 }
4415 }
4416
4417 /* Now do the normal gimplification. */
4418
4419 /* Gimplify condition. */
4420 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
4421 is_gimple_condexpr_for_cond, fb_rvalue);
4422 if (ret == GS_ERROR)
4423 return GS_ERROR;
4424 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
4425
4426 gimple_push_condition ();
4427
4428 have_then_clause_p = have_else_clause_p = false;
4429 label_true = find_goto_label (TREE_OPERAND (expr, 1));
4430 if (label_true
4431 && DECL_CONTEXT (GOTO_DESTINATION (label_true)) == current_function_decl
4432 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4433 have different locations, otherwise we end up with incorrect
4434 location information on the branches. */
4435 && (optimize
4436 || !EXPR_HAS_LOCATION (expr)
4437 || !rexpr_has_location (label_true)
4438 || EXPR_LOCATION (expr) == rexpr_location (label_true)))
4439 {
4440 have_then_clause_p = true;
4441 label_true = GOTO_DESTINATION (label_true);
4442 }
4443 else
4444 label_true = create_artificial_label (UNKNOWN_LOCATION);
4445 label_false = find_goto_label (TREE_OPERAND (expr, 2));
4446 if (label_false
4447 && DECL_CONTEXT (GOTO_DESTINATION (label_false)) == current_function_decl
4448 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4449 have different locations, otherwise we end up with incorrect
4450 location information on the branches. */
4451 && (optimize
4452 || !EXPR_HAS_LOCATION (expr)
4453 || !rexpr_has_location (label_false)
4454 || EXPR_LOCATION (expr) == rexpr_location (label_false)))
4455 {
4456 have_else_clause_p = true;
4457 label_false = GOTO_DESTINATION (label_false);
4458 }
4459 else
4460 label_false = create_artificial_label (UNKNOWN_LOCATION);
4461
4462 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
4463 &arm2);
4464 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
4465 label_false);
4466 gimple_set_location (cond_stmt, EXPR_LOCATION (expr));
4467 copy_warning (cond_stmt, COND_EXPR_COND (expr));
4468 gimplify_seq_add_stmt (&seq, cond_stmt);
4469 gimple_stmt_iterator gsi = gsi_last (seq);
4470 maybe_fold_stmt (&gsi);
4471
4472 label_cont = NULL_TREE;
4473 if (!have_then_clause_p)
4474 {
4475 /* For if (...) {} else { code; } put label_true after
4476 the else block. */
4477 if (TREE_OPERAND (expr, 1) == NULL_TREE
4478 && !have_else_clause_p
4479 && TREE_OPERAND (expr, 2) != NULL_TREE)
4480 {
4481 /* For if (0) {} else { code; } tell -Wimplicit-fallthrough
4482 handling that label_cont == label_true can be only reached
4483 through fallthrough from { code; }. */
4484 if (integer_zerop (COND_EXPR_COND (expr)))
4485 UNUSED_LABEL_P (label_true) = 1;
4486 label_cont = label_true;
4487 }
4488 else
4489 {
4490 bool then_side_effects
4491 = (TREE_OPERAND (expr, 1)
4492 && TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 1)));
4493 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
4494 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
4495 /* For if (...) { code; } else {} or
4496 if (...) { code; } else goto label; or
4497 if (...) { code; return; } else { ... }
4498 label_cont isn't needed. */
4499 if (!have_else_clause_p
4500 && TREE_OPERAND (expr, 2) != NULL_TREE
4501 && gimple_seq_may_fallthru (seq))
4502 {
4503 gimple *g;
4504 label_cont = create_artificial_label (UNKNOWN_LOCATION);
4505
4506 /* For if (0) { non-side-effect-code } else { code }
4507 tell -Wimplicit-fallthrough handling that label_cont can
4508 be only reached through fallthrough from { code }. */
4509 if (integer_zerop (COND_EXPR_COND (expr)))
4510 {
4511 UNUSED_LABEL_P (label_true) = 1;
4512 if (!then_side_effects)
4513 UNUSED_LABEL_P (label_cont) = 1;
4514 }
4515
4516 g = gimple_build_goto (label_cont);
4517
4518 /* GIMPLE_COND's are very low level; they have embedded
4519 gotos. This particular embedded goto should not be marked
4520 with the location of the original COND_EXPR, as it would
4521 correspond to the COND_EXPR's condition, not the ELSE or the
4522 THEN arms. To avoid marking it with the wrong location, flag
4523 it as "no location". */
4524 gimple_set_do_not_emit_location (g);
4525
4526 gimplify_seq_add_stmt (&seq, g);
4527 }
4528 }
4529 }
4530 if (!have_else_clause_p)
4531 {
4532 /* For if (1) { code } or if (1) { code } else { non-side-effect-code }
4533 tell -Wimplicit-fallthrough handling that label_false can be only
4534 reached through fallthrough from { code }. */
4535 if (integer_nonzerop (COND_EXPR_COND (expr))
4536 && (TREE_OPERAND (expr, 2) == NULL_TREE
4537 || !TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 2))))
4538 UNUSED_LABEL_P (label_false) = 1;
4539 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
4540 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
4541 }
4542 if (label_cont)
4543 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
4544
4545 gimple_pop_condition (pre_p);
4546 gimple_seq_add_seq (pre_p, seq);
4547
4548 if (ret == GS_ERROR)
4549 ; /* Do nothing. */
4550 else if (have_then_clause_p || have_else_clause_p)
4551 ret = GS_ALL_DONE;
4552 else
4553 {
4554 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4555 expr = TREE_OPERAND (expr, 0);
4556 gimplify_stmt (&expr, pre_p);
4557 }
4558
4559 *expr_p = NULL;
4560 return ret;
4561 }
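
/* Editor's sketch of the value case above: "x = p ? a : b;" becomes
   roughly

       if (p) goto T; else goto F;
     T:
       iftmp = a;
       goto C;
     F:
       iftmp = b;
     C:
       x = iftmp;

   The COND_EXPR is retyped to void with INIT_EXPRs in its arms, pushed
   onto the prequeue, and *EXPR_P is replaced by the temporary.  */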
4562
4563 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4564 to be marked addressable.
4565
4566 We cannot rely on such an expression being directly markable if a temporary
4567 has been created by the gimplification. In this case, we create another
4568 temporary and initialize it with a copy, which will become a store after we
4569 mark it addressable. This can happen if the front-end passed us something
4570 that it could not mark addressable yet, like a Fortran pass-by-reference
4571 parameter (int) floatvar. */
4572
4573 static void
4574 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
4575 {
4576 while (handled_component_p (*expr_p))
4577 expr_p = &TREE_OPERAND (*expr_p, 0);
4578 if (is_gimple_reg (*expr_p))
4579 {
4580 /* Do not allow an SSA name as the temporary. */
4581 tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL, false);
4582 DECL_NOT_GIMPLE_REG_P (var) = 1;
4583 *expr_p = var;
4584 }
4585 }
4586
4587 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4588 a call to __builtin_memcpy. */
4589
4590 static enum gimplify_status
4591 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
4592 gimple_seq *seq_p)
4593 {
4594 tree t, to, to_ptr, from, from_ptr;
4595 gcall *gs;
4596 location_t loc = EXPR_LOCATION (*expr_p);
4597
4598 to = TREE_OPERAND (*expr_p, 0);
4599 from = TREE_OPERAND (*expr_p, 1);
4600
4601 /* Mark the RHS addressable. Beware that it may not be possible to do so
4602 directly if a temporary has been created by the gimplification. */
4603 prepare_gimple_addressable (&from, seq_p);
4604
4605 mark_addressable (from);
4606 from_ptr = build_fold_addr_expr_loc (loc, from);
4607 gimplify_arg (&from_ptr, seq_p, loc);
4608
4609 mark_addressable (to);
4610 to_ptr = build_fold_addr_expr_loc (loc, to);
4611 gimplify_arg (&to_ptr, seq_p, loc);
4612
4613 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
4614
4615 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
4616 gimple_call_set_alloca_for_var (gs, true);
4617
4618 if (want_value)
4619 {
4620 /* tmp = memcpy() */
4621 t = create_tmp_var (TREE_TYPE (to_ptr));
4622 gimple_call_set_lhs (gs, t);
4623 gimplify_seq_add_stmt (seq_p, gs);
4624
4625 *expr_p = build_simple_mem_ref (t);
4626 return GS_ALL_DONE;
4627 }
4628
4629 gimplify_seq_add_stmt (seq_p, gs);
4630 *expr_p = NULL;
4631 return GS_ALL_DONE;
4632 }
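
/* Editor's illustration: an assignment of variable-sized objects, say
   "*dst = *src" with run-time size SZ, becomes approximately

       __builtin_memcpy (&*dst, &*src, SZ);

   and when the value of the assignment is wanted, the memcpy result is
   captured in a temporary and dereferenced in its place.  */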
4633
4634 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4635 a call to __builtin_memset. In this case we know that the RHS is
4636 a CONSTRUCTOR with an empty element list. */
4637
4638 static enum gimplify_status
4639 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
4640 gimple_seq *seq_p)
4641 {
4642 tree t, from, to, to_ptr;
4643 gcall *gs;
4644 location_t loc = EXPR_LOCATION (*expr_p);
4645
4646 /* Assert our assumptions, to abort instead of producing wrong code
4647 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4648 not be immediately exposed. */
4649 from = TREE_OPERAND (*expr_p, 1);
4650 if (TREE_CODE (from) == WITH_SIZE_EXPR)
4651 from = TREE_OPERAND (from, 0);
4652
4653 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
4654 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
4655
4656 /* Now proceed. */
4657 to = TREE_OPERAND (*expr_p, 0);
4658
4659 to_ptr = build_fold_addr_expr_loc (loc, to);
4660 gimplify_arg (&to_ptr, seq_p, loc);
4661 t = builtin_decl_implicit (BUILT_IN_MEMSET);
4662
4663 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
4664
4665 if (want_value)
4666 {
4667 /* tmp = memset() */
4668 t = create_tmp_var (TREE_TYPE (to_ptr));
4669 gimple_call_set_lhs (gs, t);
4670 gimplify_seq_add_stmt (seq_p, gs);
4671
4672 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
4673 return GS_ALL_DONE;
4674 }
4675
4676 gimplify_seq_add_stmt (seq_p, gs);
4677 *expr_p = NULL;
4678 return GS_ALL_DONE;
4679 }
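
/* Similarly (editor's illustration): clearing such an object with an
   empty CONSTRUCTOR, e.g. "*dst = (T) {}", becomes approximately
   "__builtin_memset (&*dst, 0, SZ);".  */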
4680
4681 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4682 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
4683 assignment. Return non-null if we detect a potential overlap. */
4684
4685 struct gimplify_init_ctor_preeval_data
4686 {
4687 /* The base decl of the lhs object. May be NULL, in which case we
4688 have to assume the lhs is indirect. */
4689 tree lhs_base_decl;
4690
4691 /* The alias set of the lhs object. */
4692 alias_set_type lhs_alias_set;
4693 };
4694
4695 static tree
4696 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
4697 {
4698 struct gimplify_init_ctor_preeval_data *data
4699 = (struct gimplify_init_ctor_preeval_data *) xdata;
4700 tree t = *tp;
4701
4702 /* If we find the base object, obviously we have overlap. */
4703 if (data->lhs_base_decl == t)
4704 return t;
4705
4706 /* If the constructor component is indirect, determine if we have a
4707 potential overlap with the lhs. The only bits of information we
4708 have to go on at this point are addressability and alias sets. */
4709 if ((INDIRECT_REF_P (t)
4710 || TREE_CODE (t) == MEM_REF)
4711 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4712 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
4713 return t;
4714
4715 /* If the constructor component is a call, determine if it can hide a
4716 potential overlap with the lhs through an INDIRECT_REF like above.
4717 ??? Ugh - this is completely broken. In fact this whole analysis
4718 doesn't look conservative. */
4719 if (TREE_CODE (t) == CALL_EXPR)
4720 {
4721 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
4722
4723 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
4724 if (POINTER_TYPE_P (TREE_VALUE (type))
4725 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4726 && alias_sets_conflict_p (data->lhs_alias_set,
4727 get_alias_set
4728 (TREE_TYPE (TREE_VALUE (type)))))
4729 return t;
4730 }
4731
4732 if (IS_TYPE_OR_DECL_P (t))
4733 *walk_subtrees = 0;
4734 return NULL;
4735 }
4736
4737 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
4738 force values that overlap with the lhs (as described by *DATA)
4739 into temporaries. */
4740
4741 static void
4742 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4743 struct gimplify_init_ctor_preeval_data *data)
4744 {
4745 enum gimplify_status one;
4746
4747 /* If the value is constant, then there's nothing to pre-evaluate. */
4748 if (TREE_CONSTANT (*expr_p))
4749 {
4750 /* Ensure it does not have side effects; it might contain a reference to
4751 the object we're initializing. */
4752 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
4753 return;
4754 }
4755
4756 /* If the type has non-trivial constructors, we can't pre-evaluate. */
4757 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
4758 return;
4759
4760 /* Recurse for nested constructors. */
4761 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
4762 {
4763 unsigned HOST_WIDE_INT ix;
4764 constructor_elt *ce;
4765 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
4766
4767 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
4768 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
4769
4770 return;
4771 }
4772
4773 /* If this is a variable sized type, we must remember the size. */
4774 maybe_with_size_expr (expr_p);
4775
4776 /* Gimplify the constructor element to something appropriate for the rhs
4777 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
4778 the gimplifier will consider this a store to memory. Doing this
4779 gimplification now means that we won't have to deal with complicated
4780 language-specific trees, nor trees like SAVE_EXPR that can induce
4781 exponential search behavior. */
4782 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
4783 if (one == GS_ERROR)
4784 {
4785 *expr_p = NULL;
4786 return;
4787 }
4788
4789 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
4790 with the lhs, since "a = { .x=a }" doesn't make sense. This will
4791 always be true for all scalars, since is_gimple_mem_rhs insists on a
4792 temporary variable for them. */
4793 if (DECL_P (*expr_p))
4794 return;
4795
4796 /* If this is of variable size, we have no choice but to assume it doesn't
4797 overlap since we can't make a temporary for it. */
4798 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
4799 return;
4800
4801 /* Otherwise, we must search for overlap ... */
4802 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
4803 return;
4804
4805 /* ... and if found, force the value into a temporary. */
4806 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
4807 }
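
/* Editor's illustration (field names hypothetical): in
   "a = (struct A) { .inner = a.other };" where .inner and .other are
   aggregate fields, the element "a.other" survives gimplification as a
   reference into "a" itself, so the walk above detects the overlap and
   forces the value into a temporary before any part of "a" is stored.  */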
4808
4809 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
4810 a RANGE_EXPR in a CONSTRUCTOR for an array.
4811
4812 var = lower;
4813 loop_entry:
4814 object[var] = value;
4815 if (var == upper)
4816 goto loop_exit;
4817 var = var + 1;
4818 goto loop_entry;
4819 loop_exit:
4820
4821 We increment var _after_ the loop exit check because we might otherwise
4822 fail if upper == TYPE_MAX_VALUE (type for upper).
4823
4824 Note that we never have to deal with SAVE_EXPRs here, because this has
4825 already been taken care of for us, in gimplify_init_ctor_preeval(). */
4826
4827 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
4828 gimple_seq *, bool);
4829
4830 static void
4831 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
4832 tree value, tree array_elt_type,
4833 gimple_seq *pre_p, bool cleared)
4834 {
4835 tree loop_entry_label, loop_exit_label, fall_thru_label;
4836 tree var, var_type, cref, tmp;
4837
4838 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
4839 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
4840 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
4841
4842 /* Create and initialize the index variable. */
4843 var_type = TREE_TYPE (upper);
4844 var = create_tmp_var (var_type);
4845 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
4846
4847 /* Add the loop entry label. */
4848 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
4849
4850 /* Build the reference. */
4851 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4852 var, NULL_TREE, NULL_TREE);
4853
4854 /* If the value is itself a constructor, call gimplify_init_ctor_eval to do
4855 the stores. Otherwise just assign the value to the reference. */
4856
4857 if (TREE_CODE (value) == CONSTRUCTOR)
4858 /* NB we might have to call ourself recursively through
4859 gimplify_init_ctor_eval if the value is a constructor. */
4860 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4861 pre_p, cleared);
4862 else
4863 {
4864 if (gimplify_expr (&value, pre_p, NULL, is_gimple_val, fb_rvalue)
4865 != GS_ERROR)
4866 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
4867 }
4868
4869 /* We exit the loop when the index var is equal to the upper bound. */
4870 gimplify_seq_add_stmt (pre_p,
4871 gimple_build_cond (EQ_EXPR, var, upper,
4872 loop_exit_label, fall_thru_label));
4873
4874 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
4875
4876 /* Otherwise, increment the index var... */
4877 tmp = build2 (PLUS_EXPR, var_type, var,
4878 fold_convert (var_type, integer_one_node));
4879 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
4880
4881 /* ...and jump back to the loop entry. */
4882 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
4883
4884 /* Add the loop exit label. */
4885 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
4886 }
4887
4888 /* A subroutine of gimplify_init_constructor. Generate individual
4889 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4890 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
4891 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
4892 zeroed first. */
4893
4894 static void
4895 gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
4896 gimple_seq *pre_p, bool cleared)
4897 {
4898 tree array_elt_type = NULL;
4899 unsigned HOST_WIDE_INT ix;
4900 tree purpose, value;
4901
4902 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
4903 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
4904
4905 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
4906 {
4907 tree cref;
4908
4909 /* NULL values are created above for gimplification errors. */
4910 if (value == NULL)
4911 continue;
4912
4913 if (cleared && initializer_zerop (value))
4914 continue;
4915
4916 /* ??? Here's to hoping the front end fills in all of the indices,
4917 so we don't have to figure out what's missing ourselves. */
4918 gcc_assert (purpose);
4919
4920 /* Skip zero-sized fields, unless value has side-effects. This can
4921 happen with calls to functions returning an empty type, which
4922 we shouldn't discard. As a number of downstream passes don't
4923 expect sets of empty type fields, we rely on the gimplification of
4924 the MODIFY_EXPR we make below to drop the assignment statement. */
4925 if (!TREE_SIDE_EFFECTS (value)
4926 && TREE_CODE (purpose) == FIELD_DECL
4927 && is_empty_type (TREE_TYPE (purpose)))
4928 continue;
4929
4930 /* If we have a RANGE_EXPR, we have to build a loop to assign the
4931 whole range. */
4932 if (TREE_CODE (purpose) == RANGE_EXPR)
4933 {
4934 tree lower = TREE_OPERAND (purpose, 0);
4935 tree upper = TREE_OPERAND (purpose, 1);
4936
4937 /* If the lower bound is equal to upper, just treat it as if
4938 upper was the index. */
4939 if (simple_cst_equal (lower, upper))
4940 purpose = upper;
4941 else
4942 {
4943 gimplify_init_ctor_eval_range (object, lower, upper, value,
4944 array_elt_type, pre_p, cleared);
4945 continue;
4946 }
4947 }
4948
4949 if (array_elt_type)
4950 {
4951 /* Do not use bitsizetype for ARRAY_REF indices. */
4952 if (TYPE_DOMAIN (TREE_TYPE (object)))
4953 purpose
4954 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
4955 purpose);
4956 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4957 purpose, NULL_TREE, NULL_TREE);
4958 }
4959 else
4960 {
4961 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
4962 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
4963 unshare_expr (object), purpose, NULL_TREE);
4964 }
4965
4966 if (TREE_CODE (value) == CONSTRUCTOR
4967 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
4968 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4969 pre_p, cleared);
4970 else
4971 {
4972 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
4973 gimplify_and_add (init, pre_p);
4974 ggc_free (init);
4975 }
4976 }
4977 }
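
/* Editor's illustration: "s = (struct S) { .a = 1, .b = f () };" is
   broken up here into the element stores "s.a = 1; s.b = f ();", and a
   RANGE_EXPR index such as "[0 ... 9] = v" becomes the loop built by
   gimplify_init_ctor_eval_range above.  */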
4978
4979 /* Return the appropriate RHS predicate for this LHS. */
4980
4981 gimple_predicate
4982 rhs_predicate_for (tree lhs)
4983 {
4984 if (is_gimple_reg (lhs))
4985 return is_gimple_reg_rhs_or_call;
4986 else
4987 return is_gimple_mem_rhs_or_call;
4988 }
4989
4990 /* Return the initial guess for an appropriate RHS predicate for this LHS,
4991 before the LHS has been gimplified. */
4992
4993 static gimple_predicate
4994 initial_rhs_predicate_for (tree lhs)
4995 {
4996 if (is_gimple_reg_type (TREE_TYPE (lhs)))
4997 return is_gimple_reg_rhs_or_call;
4998 else
4999 return is_gimple_mem_rhs_or_call;
5000 }
5001
5002 /* Gimplify a C99 compound literal expression. This just means adding
5003 the DECL_EXPR before the current statement and using its anonymous
5004 decl instead. */
5005
5006 static enum gimplify_status
5007 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
5008 bool (*gimple_test_f) (tree),
5009 fallback_t fallback)
5010 {
5011 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
5012 tree decl = DECL_EXPR_DECL (decl_s);
5013 tree init = DECL_INITIAL (decl);
5014 /* Mark the decl as addressable if the compound literal
5015 expression is addressable now, otherwise it is marked too late
5016 after we gimplify the initialization expression. */
5017 if (TREE_ADDRESSABLE (*expr_p))
5018 TREE_ADDRESSABLE (decl) = 1;
5019 /* Otherwise, if we don't need an lvalue and have a literal, directly
5020 substitute it. Check if it matches the gimple predicate, as
5021 otherwise we'd generate a new temporary, and we can as well just
5022 use the decl we already have. */
5023 else if (!TREE_ADDRESSABLE (decl)
5024 && !TREE_THIS_VOLATILE (decl)
5025 && init
5026 && (fallback & fb_lvalue) == 0
5027 && gimple_test_f (init))
5028 {
5029 *expr_p = init;
5030 return GS_OK;
5031 }
5032
5033 /* If the decl is not addressable, then it is being used in some
5034 expression or on the right hand side of a statement, and it can
5035 be put into a readonly data section. */
5036 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
5037 TREE_READONLY (decl) = 1;
5038
5039 /* This decl isn't mentioned in the enclosing block, so add it to the
5040 list of temps. FIXME it seems a bit of a kludge to say that
5041 anonymous artificial vars aren't pushed, but everything else is. */
5042 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
5043 gimple_add_tmp_var (decl);
5044
5045 gimplify_and_add (decl_s, pre_p);
5046 *expr_p = decl;
5047 return GS_OK;
5048 }
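
/* For illustration (editor's note): a scalar literal such as "(int) { 5 }"
   in rvalue context simply gimplifies to 5, since the initializer matches
   the caller's predicate; otherwise the DECL_EXPR is emitted on the
   prequeue and the literal's anonymous decl is used in its place.  */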
5049
5050 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
5051 return a new CONSTRUCTOR if something changed. */
5052
5053 static tree
5054 optimize_compound_literals_in_ctor (tree orig_ctor)
5055 {
5056 tree ctor = orig_ctor;
5057 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
5058 unsigned int idx, num = vec_safe_length (elts);
5059
5060 for (idx = 0; idx < num; idx++)
5061 {
5062 tree value = (*elts)[idx].value;
5063 tree newval = value;
5064 if (TREE_CODE (value) == CONSTRUCTOR)
5065 newval = optimize_compound_literals_in_ctor (value);
5066 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
5067 {
5068 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
5069 tree decl = DECL_EXPR_DECL (decl_s);
5070 tree init = DECL_INITIAL (decl);
5071
5072 if (!TREE_ADDRESSABLE (value)
5073 && !TREE_ADDRESSABLE (decl)
5074 && init
5075 && TREE_CODE (init) == CONSTRUCTOR)
5076 newval = optimize_compound_literals_in_ctor (init);
5077 }
5078 if (newval == value)
5079 continue;
5080
5081 if (ctor == orig_ctor)
5082 {
5083 ctor = copy_node (orig_ctor);
5084 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
5085 elts = CONSTRUCTOR_ELTS (ctor);
5086 }
5087 (*elts)[idx].value = newval;
5088 }
5089 return ctor;
5090 }
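
/* Editor's illustration: a nested literal such as
   "{ .p = (struct P) { 1, 2 } }" is flattened to "{ .p = { 1, 2 } }" when
   neither the literal nor its decl is addressable, avoiding a pointless
   temporary object.  */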
5091
5092 /* A subroutine of gimplify_modify_expr. Break out elements of a
5093 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
5094
5095 Note that we still need to clear any elements that don't have explicit
5096 initializers, so if not all elements are initialized we keep the
5097 original MODIFY_EXPR; we just remove all of the constructor elements.
5098
5099 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
5100 GS_ERROR if we would have to create a temporary when gimplifying
5101 this constructor. Otherwise, return GS_OK.
5102
5103 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
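/* An illustrative example (the field names are made up): for

     struct S s = { 1, 2 };

   the constructor is either promoted to a static constant and
   block-copied, or broken up roughly into

     s.a = 1;
     s.b = 2;

   possibly preceded by clearing S first when most elements are zero.  */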
5104
5105 static enum gimplify_status
5106 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
5107 bool want_value, bool notify_temp_creation)
5108 {
5109 tree object, ctor, type;
5110 enum gimplify_status ret;
5111 vec<constructor_elt, va_gc> *elts;
5112 bool cleared = false;
5113 bool is_empty_ctor = false;
5114 bool is_init_expr = (TREE_CODE (*expr_p) == INIT_EXPR);
5115
5116 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
5117
5118 if (!notify_temp_creation)
5119 {
5120 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5121 is_gimple_lvalue, fb_lvalue);
5122 if (ret == GS_ERROR)
5123 return ret;
5124 }
5125
5126 object = TREE_OPERAND (*expr_p, 0);
5127 ctor = TREE_OPERAND (*expr_p, 1)
5128 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
5129 type = TREE_TYPE (ctor);
5130 elts = CONSTRUCTOR_ELTS (ctor);
5131 ret = GS_ALL_DONE;
5132
5133 switch (TREE_CODE (type))
5134 {
5135 case RECORD_TYPE:
5136 case UNION_TYPE:
5137 case QUAL_UNION_TYPE:
5138 case ARRAY_TYPE:
5139 {
5140 /* Use readonly data for initializers of this or smaller size
5141 regardless of the num_nonzero_elements / num_unique_nonzero_elements
5142 ratio. */
5143 const HOST_WIDE_INT min_unique_size = 64;
5144 /* If num_nonzero_elements / num_unique_nonzero_elements ratio
5145 is smaller than this, use readonly data. */
5146 const int unique_nonzero_ratio = 8;
5147 /* True if a single access of the object must be ensured. This is the
5148 case if the target is volatile, the type is non-addressable, and more
5149 than one field needs to be assigned. */
5150 const bool ensure_single_access
5151 = TREE_THIS_VOLATILE (object)
5152 && !TREE_ADDRESSABLE (type)
5153 && vec_safe_length (elts) > 1;
5154 struct gimplify_init_ctor_preeval_data preeval_data;
5155 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
5156 HOST_WIDE_INT num_unique_nonzero_elements;
5157 bool complete_p, valid_const_initializer;
5158
5159 /* Aggregate types must lower constructors to initialization of
5160 individual elements. The exception is that a CONSTRUCTOR node
5161 with no elements indicates zero-initialization of the whole. */
5162 if (vec_safe_is_empty (elts))
5163 {
5164 if (notify_temp_creation)
5165 return GS_OK;
5166
5167 /* The var will be initialized and so appear on the lhs of an
5168 assignment; it can't be TREE_READONLY anymore. */
5169 if (VAR_P (object))
5170 TREE_READONLY (object) = 0;
5171
5172 is_empty_ctor = true;
5173 break;
5174 }
5175
5176 /* Fetch information about the constructor to direct later processing.
5177 We might want to make static versions of it in various cases, and
5178 can only do so if it is known to be a valid constant initializer. */
5179 valid_const_initializer
5180 = categorize_ctor_elements (ctor, &num_nonzero_elements,
5181 &num_unique_nonzero_elements,
5182 &num_ctor_elements, &complete_p);
5183
5184 /* If a const aggregate variable is being initialized, then it
5185 should never be a loss to promote the variable to be static. */
5186 if (valid_const_initializer
5187 && num_nonzero_elements > 1
5188 && TREE_READONLY (object)
5189 && VAR_P (object)
5190 && !DECL_REGISTER (object)
5191 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object))
5192 /* For ctors that have many repeated nonzero elements
5193 represented through RANGE_EXPRs, prefer initializing
5194 those through runtime loops over copies of large amounts
5195 of data from readonly data section. */
5196 && (num_unique_nonzero_elements
5197 > num_nonzero_elements / unique_nonzero_ratio
5198 || ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
5199 <= (unsigned HOST_WIDE_INT) min_unique_size)))
5200 {
5201 if (notify_temp_creation)
5202 return GS_ERROR;
5203
5204 DECL_INITIAL (object) = ctor;
5205 TREE_STATIC (object) = 1;
5206 if (!DECL_NAME (object))
5207 DECL_NAME (object) = create_tmp_var_name ("C");
5208 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
5209
5210 /* ??? C++ doesn't automatically append a .<number> to the
5211 assembler name, and even when it does, it looks at FE private
5212 data structures to figure out what that number should be,
5213 which are not set for this variable. I suppose this is
5214 important for local statics for inline functions, which aren't
5215 "local" in the object file sense. So in order to get a unique
5216 TU-local symbol, we must invoke the lhd version now. */
5217 lhd_set_decl_assembler_name (object);
5218
5219 *expr_p = NULL_TREE;
5220 break;
5221 }
5222
5223 /* The var will be initialized and so appear on the lhs of an
5224 assignment; it can't be TREE_READONLY anymore. */
5225 if (VAR_P (object) && !notify_temp_creation)
5226 TREE_READONLY (object) = 0;
5227
5228 /* If there are "lots" of initialized elements, even discounting
5229 those that are not address constants (and thus *must* be
5230 computed at runtime), then partition the constructor into
5231 constant and non-constant parts. Block copy the constant
5232 parts in, then generate code for the non-constant parts. */
5233 /* TODO. There's code in cp/typeck.cc to do this. */
5234
5235 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
5236 /* store_constructor will ignore the clearing of variable-sized
5237 objects. Initializers for such objects must explicitly set
5238 every field that needs to be set. */
5239 cleared = false;
5240 else if (!complete_p)
5241 /* If the constructor isn't complete, clear the whole object
5242 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
5243
5244 ??? This ought not to be needed. For any elements not present
5245 in the initializer, we should simply set them to zero. Except
5246 we'd need to *find* the elements that are not present, and that
5247 requires trickery to avoid quadratic compile-time behavior in
5248 large cases or excessive memory use in small cases. */
5249 cleared = !CONSTRUCTOR_NO_CLEARING (ctor);
5250 else if (num_ctor_elements - num_nonzero_elements
5251 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
5252 && num_nonzero_elements < num_ctor_elements / 4)
5253 /* If there are "lots" of zeros, it's more efficient to clear
5254 the memory and then set the nonzero elements. */
5255 cleared = true;
5256 else if (ensure_single_access && num_nonzero_elements == 0)
5257 /* If a single access to the target must be ensured and all elements
5258 are zero, then it's optimal to clear, whatever their number. */
5259 cleared = true;
5260 else
5261 cleared = false;
5262
5263 /* If there are "lots" of initialized elements, and all of them
5264 are valid address constants, then the entire initializer can
5265 be dropped to memory, and then memcpy'd out. Don't do this
5266 for sparse arrays, though, as it's more efficient to follow
5267 the standard CONSTRUCTOR behavior of memset followed by
5268 individual element initialization. Also don't do this for small
5269 all-zero initializers (which aren't big enough to merit
5270 clearing), and don't try to make bitwise copies of
5271 TREE_ADDRESSABLE types. */
5272 if (valid_const_initializer
5273 && complete_p
5274 && !(cleared || num_nonzero_elements == 0)
5275 && !TREE_ADDRESSABLE (type))
5276 {
5277 HOST_WIDE_INT size = int_size_in_bytes (type);
5278 unsigned int align;
5279
5280 /* ??? We can still get unbounded array types, at least
5281 from the C++ front end. This seems wrong, but attempt
5282 to work around it for now. */
5283 if (size < 0)
5284 {
5285 size = int_size_in_bytes (TREE_TYPE (object));
5286 if (size >= 0)
5287 TREE_TYPE (ctor) = type = TREE_TYPE (object);
5288 }
5289
5290 /* Find the maximum alignment we can assume for the object. */
5291 /* ??? Make use of DECL_OFFSET_ALIGN. */
5292 if (DECL_P (object))
5293 align = DECL_ALIGN (object);
5294 else
5295 align = TYPE_ALIGN (type);
5296
5297 /* Do a block move either if the size is so small as to make
5298 each individual move a sub-unit move on average, or if it
5299 is so large as to make individual moves inefficient. */
5300 if (size > 0
5301 && num_nonzero_elements > 1
5302 /* For ctors that have many repeated nonzero elements
5303 represented through RANGE_EXPRs, prefer initializing
5304 those through runtime loops over copies of large amounts
5305 of data from readonly data section. */
5306 && (num_unique_nonzero_elements
5307 > num_nonzero_elements / unique_nonzero_ratio
5308 || size <= min_unique_size)
5309 && (size < num_nonzero_elements
5310 || !can_move_by_pieces (size, align)))
5311 {
5312 if (notify_temp_creation)
5313 return GS_ERROR;
5314
5315 walk_tree (&ctor, force_labels_r, NULL, NULL);
5316 ctor = tree_output_constant_def (ctor);
5317 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
5318 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
5319 TREE_OPERAND (*expr_p, 1) = ctor;
5320
5321 /* This is no longer an assignment of a CONSTRUCTOR, but
5322 we still may have processing to do on the LHS. So
5323 pretend we didn't do anything here to let that happen. */
5324 return GS_UNHANDLED;
5325 }
5326 }
5327
5328 /* If a single access to the target must be ensured and there are
5329 nonzero elements or the zero elements are not assigned en masse,
5330 initialize the target from a temporary. */
5331 if (ensure_single_access && (num_nonzero_elements > 0 || !cleared))
5332 {
5333 if (notify_temp_creation)
5334 return GS_ERROR;
5335
5336 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
5337 TREE_OPERAND (*expr_p, 0) = temp;
5338 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
5339 *expr_p,
5340 build2 (MODIFY_EXPR, void_type_node,
5341 object, temp));
5342 return GS_OK;
5343 }
5344
5345 if (notify_temp_creation)
5346 return GS_OK;
5347
5348 /* If there are nonzero elements and if needed, pre-evaluate to capture
5349 elements overlapping with the lhs into temporaries. We must do this
5350 before clearing to fetch the values before they are zeroed-out. */
5351 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
5352 {
5353 preeval_data.lhs_base_decl = get_base_address (object);
5354 if (!DECL_P (preeval_data.lhs_base_decl))
5355 preeval_data.lhs_base_decl = NULL;
5356 preeval_data.lhs_alias_set = get_alias_set (object);
5357
5358 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
5359 pre_p, post_p, &preeval_data);
5360 }
5361
5362 bool ctor_has_side_effects_p
5363 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
5364
5365 if (cleared)
5366 {
5367 /* Zap the CONSTRUCTOR element list, which simplifies this case.
5368 Note that we still have to gimplify, in order to handle the
5369 case of variable sized types. Avoid shared tree structures. */
5370 CONSTRUCTOR_ELTS (ctor) = NULL;
5371 TREE_SIDE_EFFECTS (ctor) = 0;
5372 object = unshare_expr (object);
5373 gimplify_stmt (expr_p, pre_p);
5374 }
5375
5376 /* If we have not block cleared the object, or if there are nonzero
5377 elements in the constructor, or if the constructor has side effects,
5378 add assignments to the individual scalar fields of the object. */
5379 if (!cleared
5380 || num_nonzero_elements > 0
5381 || ctor_has_side_effects_p)
5382 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
5383
5384 *expr_p = NULL_TREE;
5385 }
5386 break;
5387
5388 case COMPLEX_TYPE:
5389 {
5390 tree r, i;
5391
5392 if (notify_temp_creation)
5393 return GS_OK;
5394
5395 /* Extract the real and imaginary parts out of the ctor. */
5396 gcc_assert (elts->length () == 2);
5397 r = (*elts)[0].value;
5398 i = (*elts)[1].value;
5399 if (r == NULL || i == NULL)
5400 {
5401 tree zero = build_zero_cst (TREE_TYPE (type));
5402 if (r == NULL)
5403 r = zero;
5404 if (i == NULL)
5405 i = zero;
5406 }
5407
5408 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
5409 represent creation of a complex value. */
5410 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
5411 {
5412 ctor = build_complex (type, r, i);
5413 TREE_OPERAND (*expr_p, 1) = ctor;
5414 }
5415 else
5416 {
5417 ctor = build2 (COMPLEX_EXPR, type, r, i);
5418 TREE_OPERAND (*expr_p, 1) = ctor;
5419 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
5420 pre_p,
5421 post_p,
5422 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
5423 fb_rvalue);
5424 }
5425 }
5426 break;
5427
5428 case VECTOR_TYPE:
5429 {
5430 unsigned HOST_WIDE_INT ix;
5431 constructor_elt *ce;
5432
5433 if (notify_temp_creation)
5434 return GS_OK;
5435
5436 /* Go ahead and simplify constant constructors to VECTOR_CST. */
5437 if (TREE_CONSTANT (ctor))
5438 {
5439 bool constant_p = true;
5440 tree value;
5441
5442 /* Even when ctor is constant, it might contain non-*_CST
5443 elements, such as addresses or trapping values like
5444 1.0/0.0 - 1.0/0.0. Such expressions don't belong
5445 in VECTOR_CST nodes. */
5446 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
5447 if (!CONSTANT_CLASS_P (value))
5448 {
5449 constant_p = false;
5450 break;
5451 }
5452
5453 if (constant_p)
5454 {
5455 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
5456 break;
5457 }
5458
5459 TREE_CONSTANT (ctor) = 0;
5460 }
5461
5462 /* Vector types use CONSTRUCTOR all the way through gimple
5463 compilation as a general initializer. */
5464 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
5465 {
5466 enum gimplify_status tret;
5467 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
5468 fb_rvalue);
5469 if (tret == GS_ERROR)
5470 ret = GS_ERROR;
5471 else if (TREE_STATIC (ctor)
5472 && !initializer_constant_valid_p (ce->value,
5473 TREE_TYPE (ce->value)))
5474 TREE_STATIC (ctor) = 0;
5475 }
5476 recompute_constructor_flags (ctor);
5477 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
5478 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
5479 }
5480 break;
5481
5482 default:
5483 /* So how did we get a CONSTRUCTOR for a scalar type? */
5484 gcc_unreachable ();
5485 }
5486
5487 if (ret == GS_ERROR)
5488 return GS_ERROR;
5489 /* If we have gimplified both sides of the initializer but have
5490 not emitted an assignment, do so now. */
5491 if (*expr_p)
5492 {
5493 tree lhs = TREE_OPERAND (*expr_p, 0);
5494 tree rhs = TREE_OPERAND (*expr_p, 1);
5495 if (want_value && object == lhs)
5496 lhs = unshare_expr (lhs);
5497 gassign *init = gimple_build_assign (lhs, rhs);
5498 gimplify_seq_add_stmt (pre_p, init);
5499 }
5500 if (want_value)
5501 {
5502 *expr_p = object;
5503 ret = GS_OK;
5504 }
5505 else
5506 {
5507 *expr_p = NULL;
5508 ret = GS_ALL_DONE;
5509 }
5510
5511 /* If the user requests to initialize automatic variables, we
5512 should initialize the padding inside the variable. Add a call to
5513 __builtin_clear_padding (&object, 0, for_auto_init = true) to
5514 initialize the padding of the object always to zero regardless of
5515 INIT_TYPE. Note, we will not insert this call if the aggregate
5516 variable has already been completely cleared or if it's initialized
5517 with an empty constructor. We cannot insert this call if the
5518 variable is a gimple register, since __builtin_clear_padding will take
5519 the address of the variable. As a result, if a long double/_Complex long
5520 double variable is spilled to the stack later, its padding cannot
5521 be cleared with __builtin_clear_padding; we should clear its padding
5522 when it is spilled into memory. */
5523 if (is_init_expr
5524 && !is_gimple_reg (object)
5525 && clear_padding_type_may_have_padding_p (type)
5526 && ((AGGREGATE_TYPE_P (type) && !cleared && !is_empty_ctor)
5527 || !AGGREGATE_TYPE_P (type))
5528 && is_var_need_auto_init (object))
5529 gimple_add_padding_init_for_auto_var (object, false, pre_p);
5530
5531 return ret;
5532 }
5533
5534 /* Given a pointer value T, return a simplified version of an
5535 indirection through T, or NULL_TREE if no simplification is
5536 possible. This may only be applied to the rhs of an expression.
5537 Note that the resulting type may be different from the type pointed
5538 to, in the sense that it is still compatible from the langhooks
5539 point of view. */
5540
5541 static tree
5542 gimple_fold_indirect_ref_rhs (tree t)
5543 {
5544 return gimple_fold_indirect_ref (t);
5545 }
5546
5547 /* Subroutine of gimplify_modify_expr to do simplifications of
5548 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
5549 something changes. */
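/* E.g. "x = (a, b)" has the COMPOUND_EXPR on the RHS flattened first, and
   an assignment from a COND_EXPR of non-register type is pushed down into
   both branches; see the individual cases below.  */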
5550
5551 static enum gimplify_status
5552 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
5553 gimple_seq *pre_p, gimple_seq *post_p,
5554 bool want_value)
5555 {
5556 enum gimplify_status ret = GS_UNHANDLED;
5557 bool changed;
5558
5559 do
5560 {
5561 changed = false;
5562 switch (TREE_CODE (*from_p))
5563 {
5564 case VAR_DECL:
5565 /* If we're assigning from a read-only variable initialized with
5566 a constructor and not volatile, do the direct assignment from
5567 the constructor, but only if the target is not volatile either
5568 since this latter assignment might end up being done on a per
5569 field basis. However, if the target is volatile and the type
5570 is aggregate and non-addressable, gimplify_init_constructor
5571 knows that it needs to ensure a single access to the target
5572 and it will return GS_OK only in this case. */
5573 if (TREE_READONLY (*from_p)
5574 && DECL_INITIAL (*from_p)
5575 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR
5576 && !TREE_THIS_VOLATILE (*from_p)
5577 && (!TREE_THIS_VOLATILE (*to_p)
5578 || (AGGREGATE_TYPE_P (TREE_TYPE (*to_p))
5579 && !TREE_ADDRESSABLE (TREE_TYPE (*to_p)))))
5580 {
5581 tree old_from = *from_p;
5582 enum gimplify_status subret;
5583
5584 /* Move the constructor into the RHS. */
5585 *from_p = unshare_expr (DECL_INITIAL (*from_p));
5586
5587 /* Let's see if gimplify_init_constructor will need to put
5588 it in memory. */
5589 subret = gimplify_init_constructor (expr_p, NULL, NULL,
5590 false, true);
5591 if (subret == GS_ERROR)
5592 {
5593 /* If so, revert the change. */
5594 *from_p = old_from;
5595 }
5596 else
5597 {
5598 ret = GS_OK;
5599 changed = true;
5600 }
5601 }
5602 break;
5603 case INDIRECT_REF:
5604 {
5605 /* If we have code like
5606
5607 *(const A*)(A*)&x
5608
5609 where the type of "x" is a (possibly cv-qualified) variant
5610 of "A", treat the entire expression as identical to "x".
5611 This kind of code arises in C++ when an object is bound
5612 to a const reference, and if "x" is a TARGET_EXPR we want
5613 to take advantage of the optimization below. */
5614 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
5615 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
5616 if (t)
5617 {
5618 if (TREE_THIS_VOLATILE (t) != volatile_p)
5619 {
5620 if (DECL_P (t))
5621 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
5622 build_fold_addr_expr (t));
5623 if (REFERENCE_CLASS_P (t))
5624 TREE_THIS_VOLATILE (t) = volatile_p;
5625 }
5626 *from_p = t;
5627 ret = GS_OK;
5628 changed = true;
5629 }
5630 break;
5631 }
5632
5633 case TARGET_EXPR:
5634 {
5635 /* If we are initializing something from a TARGET_EXPR, strip the
5636 TARGET_EXPR and initialize it directly, if possible. This can't
5637 be done if the initializer is void, since that implies that the
5638 temporary is set in some non-trivial way.
5639
5640 ??? What about code that pulls out the temp and uses it
5641 elsewhere? I think that such code never uses the TARGET_EXPR as
5642 an initializer. If I'm wrong, we'll die because the temp won't
5643 have any RTL. In that case, I guess we'll need to replace
5644 references somehow. */
5645 tree init = TARGET_EXPR_INITIAL (*from_p);
5646
5647 if (init
5648 && (TREE_CODE (*expr_p) != MODIFY_EXPR
5649 || !TARGET_EXPR_NO_ELIDE (*from_p))
5650 && !VOID_TYPE_P (TREE_TYPE (init)))
5651 {
5652 *from_p = init;
5653 ret = GS_OK;
5654 changed = true;
5655 }
5656 }
5657 break;
5658
5659 case COMPOUND_EXPR:
5660 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5661 caught. */
5662 gimplify_compound_expr (from_p, pre_p, true);
5663 ret = GS_OK;
5664 changed = true;
5665 break;
5666
5667 case CONSTRUCTOR:
5668 /* If we already made some changes, let the front end have a
5669 crack at this before we break it down. */
5670 if (ret != GS_UNHANDLED)
5671 break;
5672
5673 /* If we're initializing from a CONSTRUCTOR, break this into
5674 individual MODIFY_EXPRs. */
5675 ret = gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
5676 false);
5677 return ret;
5678
5679 case COND_EXPR:
5680 /* If we're assigning to a non-register type, push the assignment
5681 down into the branches. This is mandatory for ADDRESSABLE types,
5682 since we cannot generate temporaries for such, but it saves a
5683 copy in other cases as well. */
5684 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
5685 {
5686 /* This code should mirror the code in gimplify_cond_expr. */
5687 enum tree_code code = TREE_CODE (*expr_p);
5688 tree cond = *from_p;
5689 tree result = *to_p;
5690
5691 ret = gimplify_expr (&result, pre_p, post_p,
5692 is_gimple_lvalue, fb_lvalue);
5693 if (ret != GS_ERROR)
5694 ret = GS_OK;
5695
5696 /* If we are going to write RESULT more than once, clear
5697 TREE_READONLY flag, otherwise we might incorrectly promote
5698 the variable to static const and initialize it at compile
5699 time in one of the branches. */
5700 if (VAR_P (result)
5701 && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
5702 && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5703 TREE_READONLY (result) = 0;
5704 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
5705 TREE_OPERAND (cond, 1)
5706 = build2 (code, void_type_node, result,
5707 TREE_OPERAND (cond, 1));
5708 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5709 TREE_OPERAND (cond, 2)
5710 = build2 (code, void_type_node, unshare_expr (result),
5711 TREE_OPERAND (cond, 2));
5712
5713 TREE_TYPE (cond) = void_type_node;
5714 recalculate_side_effects (cond);
5715
5716 if (want_value)
5717 {
5718 gimplify_and_add (cond, pre_p);
5719 *expr_p = unshare_expr (result);
5720 }
5721 else
5722 *expr_p = cond;
5723 return ret;
5724 }
5725 break;
5726
5727 case CALL_EXPR:
5728 /* For calls that return in memory, give *to_p as the CALL_EXPR's
5729 return slot so that we don't generate a temporary. */
5730 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
5731 && aggregate_value_p (*from_p, *from_p))
5732 {
5733 bool use_target;
5734
5735 if (!(rhs_predicate_for (*to_p))(*from_p))
5736 /* If we need a temporary, *to_p isn't accurate. */
5737 use_target = false;
5738 /* It's OK to use the return slot directly unless it's an NRV. */
5739 else if (TREE_CODE (*to_p) == RESULT_DECL
5740 && DECL_NAME (*to_p) == NULL_TREE
5741 && needs_to_live_in_memory (*to_p))
5742 use_target = true;
5743 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
5744 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
5745 /* Don't force regs into memory. */
5746 use_target = false;
5747 else if (TREE_CODE (*expr_p) == INIT_EXPR)
5748 /* It's OK to use the target directly if it's being
5749 initialized. */
5750 use_target = true;
5751 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
5752 != INTEGER_CST)
5753 /* Always use the target and thus RSO for variable-sized types.
5754 GIMPLE cannot deal with a variable-sized assignment
5755 embedded in a call statement. */
5756 use_target = true;
5757 else if (TREE_CODE (*to_p) != SSA_NAME
5758 && (!is_gimple_variable (*to_p)
5759 || needs_to_live_in_memory (*to_p)))
5760 /* Don't use the original target if it's already addressable;
5761 if its address escapes, and the called function uses the
5762 NRV optimization, a conforming program could see *to_p
5763 change before the called function returns; see c++/19317.
5764 When optimizing, the return_slot pass marks more functions
5765 as safe after we have escape info. */
5766 use_target = false;
5767 else
5768 use_target = true;
5769
5770 if (use_target)
5771 {
5772 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
5773 mark_addressable (*to_p);
5774 }
5775 }
5776 break;
5777
5778 case WITH_SIZE_EXPR:
5779 /* Likewise for calls that return an aggregate of non-constant size,
5780 since we would not be able to generate a temporary at all. */
5781 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
5782 {
5783 *from_p = TREE_OPERAND (*from_p, 0);
5784 /* We don't change ret in this case because the
5785 WITH_SIZE_EXPR might have been added in
5786 gimplify_modify_expr, so returning GS_OK would lead to an
5787 infinite loop. */
5788 changed = true;
5789 }
5790 break;
5791
5792 /* If we're initializing from a container, push the initialization
5793 inside it. */
5794 case CLEANUP_POINT_EXPR:
5795 case BIND_EXPR:
5796 case STATEMENT_LIST:
5797 {
5798 tree wrap = *from_p;
5799 tree t;
5800
5801 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
5802 fb_lvalue);
5803 if (ret != GS_ERROR)
5804 ret = GS_OK;
5805
5806 t = voidify_wrapper_expr (wrap, *expr_p);
5807 gcc_assert (t == *expr_p);
5808
5809 if (want_value)
5810 {
5811 gimplify_and_add (wrap, pre_p);
5812 *expr_p = unshare_expr (*to_p);
5813 }
5814 else
5815 *expr_p = wrap;
5816 return GS_OK;
5817 }
5818
5819 case NOP_EXPR:
5820 /* Pull out compound literal expressions from a NOP_EXPR.
5821 Those are created in the C FE to drop qualifiers during
5822 lvalue conversion. */
5823 if ((TREE_CODE (TREE_OPERAND (*from_p, 0)) == COMPOUND_LITERAL_EXPR)
5824 && tree_ssa_useless_type_conversion (*from_p))
5825 {
5826 *from_p = TREE_OPERAND (*from_p, 0);
5827 ret = GS_OK;
5828 changed = true;
5829 }
5830 break;
5831
5832 case COMPOUND_LITERAL_EXPR:
5833 {
5834 tree complit = TREE_OPERAND (*expr_p, 1);
5835 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
5836 tree decl = DECL_EXPR_DECL (decl_s);
5837 tree init = DECL_INITIAL (decl);
5838
5839 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
5840 into struct T x = { 0, 1, 2 } if the address of the
5841 compound literal has never been taken. */
5842 if (!TREE_ADDRESSABLE (complit)
5843 && !TREE_ADDRESSABLE (decl)
5844 && init)
5845 {
5846 *expr_p = copy_node (*expr_p);
5847 TREE_OPERAND (*expr_p, 1) = init;
5848 return GS_OK;
5849 }
5850 }
5851
5852 default:
5853 break;
5854 }
5855 }
5856 while (changed);
5857
5858 return ret;
5859 }
5860
5861
5862 /* Return true if T looks like a valid GIMPLE statement. */
5863
5864 static bool
5865 is_gimple_stmt (tree t)
5866 {
5867 const enum tree_code code = TREE_CODE (t);
5868
5869 switch (code)
5870 {
5871 case NOP_EXPR:
5872 /* The only valid NOP_EXPR is the empty statement. */
5873 return IS_EMPTY_STMT (t);
5874
5875 case BIND_EXPR:
5876 case COND_EXPR:
5877 /* These are only valid if they're void. */
5878 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
5879
5880 case SWITCH_EXPR:
5881 case GOTO_EXPR:
5882 case RETURN_EXPR:
5883 case LABEL_EXPR:
5884 case CASE_LABEL_EXPR:
5885 case TRY_CATCH_EXPR:
5886 case TRY_FINALLY_EXPR:
5887 case EH_FILTER_EXPR:
5888 case CATCH_EXPR:
5889 case ASM_EXPR:
5890 case STATEMENT_LIST:
5891 case OACC_PARALLEL:
5892 case OACC_KERNELS:
5893 case OACC_SERIAL:
5894 case OACC_DATA:
5895 case OACC_HOST_DATA:
5896 case OACC_DECLARE:
5897 case OACC_UPDATE:
5898 case OACC_ENTER_DATA:
5899 case OACC_EXIT_DATA:
5900 case OACC_CACHE:
5901 case OMP_PARALLEL:
5902 case OMP_FOR:
5903 case OMP_SIMD:
5904 case OMP_DISTRIBUTE:
5905 case OMP_LOOP:
5906 case OACC_LOOP:
5907 case OMP_SCAN:
5908 case OMP_SCOPE:
5909 case OMP_SECTIONS:
5910 case OMP_SECTION:
5911 case OMP_SINGLE:
5912 case OMP_MASTER:
5913 case OMP_MASKED:
5914 case OMP_TASKGROUP:
5915 case OMP_ORDERED:
5916 case OMP_CRITICAL:
5917 case OMP_TASK:
5918 case OMP_TARGET:
5919 case OMP_TARGET_DATA:
5920 case OMP_TARGET_UPDATE:
5921 case OMP_TARGET_ENTER_DATA:
5922 case OMP_TARGET_EXIT_DATA:
5923 case OMP_TASKLOOP:
5924 case OMP_TEAMS:
5925 /* These are always void. */
5926 return true;
5927
5928 case CALL_EXPR:
5929 case MODIFY_EXPR:
5930 case PREDICT_EXPR:
5931 /* These are valid regardless of their type. */
5932 return true;
5933
5934 default:
5935 return false;
5936 }
5937 }
5938
5939
5940 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
5941 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a gimple register.
5942
5943 IMPORTANT NOTE: This promotion is performed by introducing a load of the
5944 other, unmodified part of the complex object just before the total store.
5945 As a consequence, if the object is still uninitialized, an undefined value
5946 will be loaded into a register, which may result in a spurious exception
5947 if the register is floating-point and the value happens to be a signaling
5948 NaN, for example. Then the fully-fledged complex operations lowering pass,
5949 followed by a DCE pass, is necessary in order to fix things up. */
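/* An illustrative example, with D.1 standing for the introduced load:

     __real__ z = r;

   becomes roughly

     D.1 = __imag__ z;
     z = COMPLEX_EXPR <r, D.1>;  */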
5950
5951 static enum gimplify_status
5952 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
5953 bool want_value)
5954 {
5955 enum tree_code code, ocode;
5956 tree lhs, rhs, new_rhs, other, realpart, imagpart;
5957
5958 lhs = TREE_OPERAND (*expr_p, 0);
5959 rhs = TREE_OPERAND (*expr_p, 1);
5960 code = TREE_CODE (lhs);
5961 lhs = TREE_OPERAND (lhs, 0);
5962
5963 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
5964 other = build1 (ocode, TREE_TYPE (rhs), lhs);
5965 suppress_warning (other);
5966 other = get_formal_tmp_var (other, pre_p);
5967
5968 realpart = code == REALPART_EXPR ? rhs : other;
5969 imagpart = code == REALPART_EXPR ? other : rhs;
5970
5971 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
5972 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
5973 else
5974 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
5975
5976 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
5977 *expr_p = (want_value) ? rhs : NULL_TREE;
5978
5979 return GS_ALL_DONE;
5980 }
5981
5982 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
5983
5984 modify_expr
5985 : varname '=' rhs
5986 | '*' ID '=' rhs
5987
5988 PRE_P points to the list where side effects that must happen before
5989 *EXPR_P should be stored.
5990
5991 POST_P points to the list where side effects that must happen after
5992 *EXPR_P should be stored.
5993
5994 WANT_VALUE is nonzero iff we want to use the value of this expression
5995 in another expression. */
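/* E.g. "a = b + c" becomes the tuple GIMPLE_ASSIGN <a, b + c>, while
   "a = foo ()" must become a single GIMPLE_CALL with A as its LHS, as
   explained further below.  */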
5996
5997 static enum gimplify_status
5998 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
5999 bool want_value)
6000 {
6001 tree *from_p = &TREE_OPERAND (*expr_p, 1);
6002 tree *to_p = &TREE_OPERAND (*expr_p, 0);
6003 enum gimplify_status ret = GS_UNHANDLED;
6004 gimple *assign;
6005 location_t loc = EXPR_LOCATION (*expr_p);
6006 gimple_stmt_iterator gsi;
6007
6008 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
6009 || TREE_CODE (*expr_p) == INIT_EXPR);
6010
6011 /* Trying to simplify a clobber using normal logic doesn't work,
6012 so handle it here. */
6013 if (TREE_CLOBBER_P (*from_p))
6014 {
6015 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
6016 if (ret == GS_ERROR)
6017 return ret;
6018 gcc_assert (!want_value);
6019 if (!VAR_P (*to_p) && TREE_CODE (*to_p) != MEM_REF)
6020 {
6021 tree addr = get_initialized_tmp_var (build_fold_addr_expr (*to_p),
6022 pre_p, post_p);
6023 *to_p = build_simple_mem_ref_loc (EXPR_LOCATION (*to_p), addr);
6024 }
6025 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
6026 *expr_p = NULL;
6027 return GS_ALL_DONE;
6028 }
6029
6030 /* Insert pointer conversions required by the middle-end that are not
6031 required by the frontend. This fixes middle-end type checking for,
6032 for example, gcc.dg/redecl-6.c. */
6033 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
6034 {
6035 STRIP_USELESS_TYPE_CONVERSION (*from_p);
6036 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
6037 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
6038 }
6039
6040 /* See if any simplifications can be done based on what the RHS is. */
6041 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
6042 want_value);
6043 if (ret != GS_UNHANDLED)
6044 return ret;
6045
6046 /* For empty types only gimplify the left hand side and right hand
6047 side as statements and throw away the assignment. Do this after
6048 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
6049 types properly. */
6050 if (is_empty_type (TREE_TYPE (*from_p))
6051 && !want_value
6052 /* Don't do this for calls that return addressable types, expand_call
6053 relies on those having a lhs. */
6054 && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
6055 && TREE_CODE (*from_p) == CALL_EXPR))
6056 {
6057 gimplify_stmt (from_p, pre_p);
6058 gimplify_stmt (to_p, pre_p);
6059 *expr_p = NULL_TREE;
6060 return GS_ALL_DONE;
6061 }
6062
6063 /* If the value being copied is of variable width, compute the length
6064 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
6065 before gimplifying any of the operands so that we can resolve any
6066 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
6067 the size of the expression to be copied, not of the destination, so
6068 that is what we must do here. */
6069 maybe_with_size_expr (from_p);
6070
6071 /* As a special case, we have to temporarily allow for assignments
6072 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
6073 a toplevel statement, when gimplifying the GENERIC expression
6074 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
6075 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
6076
6077 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
6078 prevent gimplify_expr from trying to create a new temporary for
6079 foo's LHS, we tell it that it should only gimplify until it
6080 reaches the CALL_EXPR. On return from gimplify_expr, the newly
6081 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
6082 and all we need to do here is set 'a' to be its LHS. */
6083
6084 /* Gimplify the RHS first for C++17 and bug 71104. */
6085 gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
6086 ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
6087 if (ret == GS_ERROR)
6088 return ret;
6089
6090 /* Then gimplify the LHS. */
6091 /* If we gimplified the RHS to a CALL_EXPR and that call may return
6092 twice we have to make sure to gimplify into non-SSA as otherwise
6093 the abnormal edge added later will make those defs not dominate
6094 their uses.
6095 ??? Technically this applies only to the registers used in the
6096 resulting non-register *TO_P. */
6097 bool saved_into_ssa = gimplify_ctxp->into_ssa;
6098 if (saved_into_ssa
6099 && TREE_CODE (*from_p) == CALL_EXPR
6100 && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
6101 gimplify_ctxp->into_ssa = false;
6102 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
6103 gimplify_ctxp->into_ssa = saved_into_ssa;
6104 if (ret == GS_ERROR)
6105 return ret;
6106
6107 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
6108 guess for the predicate was wrong. */
6109 gimple_predicate final_pred = rhs_predicate_for (*to_p);
6110 if (final_pred != initial_pred)
6111 {
6112 ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
6113 if (ret == GS_ERROR)
6114 return ret;
6115 }
6116
6117 /* In case of a va_arg internal fn wrapped in a WITH_SIZE_EXPR, add the type
6118 size as an argument to the call. */
6119 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
6120 {
6121 tree call = TREE_OPERAND (*from_p, 0);
6122 tree vlasize = TREE_OPERAND (*from_p, 1);
6123
6124 if (TREE_CODE (call) == CALL_EXPR
6125 && CALL_EXPR_IFN (call) == IFN_VA_ARG)
6126 {
6127 int nargs = call_expr_nargs (call);
6128 tree type = TREE_TYPE (call);
6129 tree ap = CALL_EXPR_ARG (call, 0);
6130 tree tag = CALL_EXPR_ARG (call, 1);
6131 tree aptag = CALL_EXPR_ARG (call, 2);
6132 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
6133 IFN_VA_ARG, type,
6134 nargs + 1, ap, tag,
6135 aptag, vlasize);
6136 TREE_OPERAND (*from_p, 0) = newcall;
6137 }
6138 }
6139
6140 /* Now see if the above changed *from_p to something we handle specially. */
6141 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
6142 want_value);
6143 if (ret != GS_UNHANDLED)
6144 return ret;
6145
6146 /* If we've got a variable-sized assignment between two lvalues (i.e. one
6147 that does not involve a call), then we can make things a bit more
6148 straightforward by converting the assignment to memcpy or memset. */
6149 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
6150 {
6151 tree from = TREE_OPERAND (*from_p, 0);
6152 tree size = TREE_OPERAND (*from_p, 1);
6153
6154 if (TREE_CODE (from) == CONSTRUCTOR)
6155 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
6156
6157 if (is_gimple_addressable (from))
6158 {
6159 *from_p = from;
6160 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
6161 pre_p);
6162 }
6163 }
6164
6165 /* Transform partial stores to non-addressable complex variables into
6166 total stores. This allows us to use real instead of virtual operands
6167 for these variables, which improves optimization. */
6168 if ((TREE_CODE (*to_p) == REALPART_EXPR
6169 || TREE_CODE (*to_p) == IMAGPART_EXPR)
6170 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
6171 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
6172
6173 /* Try to alleviate the effects of the gimplification creating artificial
6174 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
6175 make sure not to create DECL_DEBUG_EXPR links across functions. */
6176 if (!gimplify_ctxp->into_ssa
6177 && VAR_P (*from_p)
6178 && DECL_IGNORED_P (*from_p)
6179 && DECL_P (*to_p)
6180 && !DECL_IGNORED_P (*to_p)
6181 && decl_function_context (*to_p) == current_function_decl
6182 && decl_function_context (*from_p) == current_function_decl)
6183 {
6184 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
6185 DECL_NAME (*from_p)
6186 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
6187 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
6188 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
6189 }
6190
6191 if (want_value && TREE_THIS_VOLATILE (*to_p))
6192 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
6193
6194 if (TREE_CODE (*from_p) == CALL_EXPR)
6195 {
6196 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
6197 instead of a GIMPLE_ASSIGN. */
6198 gcall *call_stmt;
6199 if (CALL_EXPR_FN (*from_p) == NULL_TREE)
6200 {
6201 /* Gimplify internal functions created in the FEs. */
6202 int nargs = call_expr_nargs (*from_p), i;
6203 enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
6204 auto_vec<tree> vargs (nargs);
6205
6206 for (i = 0; i < nargs; i++)
6207 {
6208 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
6209 EXPR_LOCATION (*from_p));
6210 vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
6211 }
6212 call_stmt = gimple_build_call_internal_vec (ifn, vargs);
6213 gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
6214 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
6215 }
6216 else
6217 {
6218 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
6219 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
6220 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
6221 tree fndecl = get_callee_fndecl (*from_p);
6222 if (fndecl
6223 && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
6224 && call_expr_nargs (*from_p) == 3)
6225 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
6226 CALL_EXPR_ARG (*from_p, 0),
6227 CALL_EXPR_ARG (*from_p, 1),
6228 CALL_EXPR_ARG (*from_p, 2));
6229 else
6230 {
6231 call_stmt = gimple_build_call_from_tree (*from_p, fnptrtype);
6232 }
6233 }
6234 notice_special_calls (call_stmt);
6235 if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
6236 gimple_call_set_lhs (call_stmt, *to_p);
6237 else if (TREE_CODE (*to_p) == SSA_NAME)
6238 /* The above is somewhat premature; avoid ICEing later for an
6239 SSA name w/o a definition. We may have uses in the GIMPLE IL.
6240 ??? This doesn't make it a default-def. */
6241 SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
6242
6243 assign = call_stmt;
6244 }
6245 else
6246 {
6247 assign = gimple_build_assign (*to_p, *from_p);
6248 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
6249 if (COMPARISON_CLASS_P (*from_p))
6250 copy_warning (assign, *from_p);
6251 }
6252
6253 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
6254 {
6255 /* We should have got an SSA name from the start. */
6256 gcc_assert (TREE_CODE (*to_p) == SSA_NAME
6257 || ! gimple_in_ssa_p (cfun));
6258 }
6259
6260 gimplify_seq_add_stmt (pre_p, assign);
6261 gsi = gsi_last (*pre_p);
6262 maybe_fold_stmt (&gsi);
6263
6264 if (want_value)
6265 {
6266 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
6267 return GS_OK;
6268 }
6269 else
6270 *expr_p = NULL;
6271
6272 return GS_ALL_DONE;
6273 }
6274
6275 /* Gimplify a comparison between two variable-sized objects. Do this
6276 with a call to BUILT_IN_MEMCMP. */
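/* E.g. a comparison "a == b" between two variable-length arrays is
   rewritten roughly as "memcmp (&a, &b, sizeof (a)) == 0".  */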
6277
6278 static enum gimplify_status
6279 gimplify_variable_sized_compare (tree *expr_p)
6280 {
6281 location_t loc = EXPR_LOCATION (*expr_p);
6282 tree op0 = TREE_OPERAND (*expr_p, 0);
6283 tree op1 = TREE_OPERAND (*expr_p, 1);
6284 tree t, arg, dest, src, expr;
6285
6286 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
6287 arg = unshare_expr (arg);
6288 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
6289 src = build_fold_addr_expr_loc (loc, op1);
6290 dest = build_fold_addr_expr_loc (loc, op0);
6291 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
6292 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
6293
6294 expr
6295 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
6296 SET_EXPR_LOCATION (expr, loc);
6297 *expr_p = expr;
6298
6299 return GS_OK;
6300 }
6301
6302 /* Gimplify a comparison between two aggregate objects of integral scalar
6303 mode as a comparison between the bitwise equivalent scalar values. */
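/* E.g. two 4-byte structures of SImode compare roughly as
   "VIEW_CONVERT_EXPR<int>(s1) == VIEW_CONVERT_EXPR<int>(s2)", where
   "int" stands for whatever type_for_mode returns for the mode.  */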
6304
6305 static enum gimplify_status
6306 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
6307 {
6308 location_t loc = EXPR_LOCATION (*expr_p);
6309 tree op0 = TREE_OPERAND (*expr_p, 0);
6310 tree op1 = TREE_OPERAND (*expr_p, 1);
6311
6312 tree type = TREE_TYPE (op0);
6313 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
6314
6315 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
6316 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
6317
6318 *expr_p
6319 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
6320
6321 return GS_OK;
6322 }
6323
6324 /* Gimplify an expression sequence. This function gimplifies each
6325 expression and rewrites the original expression with the last
6326 expression of the sequence in GIMPLE form.
6327
6328 PRE_P points to the list where the side effects for all the
6329 expressions in the sequence will be emitted.
6330
6331 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
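/* E.g. for "x = (a, b, c)" the subexpressions "a" and "b" are emitted as
   statements on PRE_P and "c" becomes the value of the whole sequence.  */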
6332
6333 static enum gimplify_status
6334 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
6335 {
6336 tree t = *expr_p;
6337
6338 do
6339 {
6340 tree *sub_p = &TREE_OPERAND (t, 0);
6341
6342 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
6343 gimplify_compound_expr (sub_p, pre_p, false);
6344 else
6345 gimplify_stmt (sub_p, pre_p);
6346
6347 t = TREE_OPERAND (t, 1);
6348 }
6349 while (TREE_CODE (t) == COMPOUND_EXPR);
6350
6351 *expr_p = t;
6352 if (want_value)
6353 return GS_OK;
6354 else
6355 {
6356 gimplify_stmt (expr_p, pre_p);
6357 return GS_ALL_DONE;
6358 }
6359 }
6360
6361 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
6362 gimplify. After gimplification, EXPR_P will point to a new temporary
6363 that holds the original value of the SAVE_EXPR node.
6364
6365 PRE_P points to the list where side effects that must happen before
6366 *EXPR_P should be stored. */
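/* E.g. the first encounter of SAVE_EXPR <n * 4> evaluates "n * 4" once
   into a temporary and marks the node resolved; every later occurrence
   of the same SAVE_EXPR then yields that temporary.  */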
6367
6368 static enum gimplify_status
6369 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6370 {
6371 enum gimplify_status ret = GS_ALL_DONE;
6372 tree val;
6373
6374 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
6375 val = TREE_OPERAND (*expr_p, 0);
6376
6377 if (val && TREE_TYPE (val) == error_mark_node)
6378 return GS_ERROR;
6379
6380 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
6381 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
6382 {
6383 /* The operand may be a void-valued expression. It is
6384 being executed only for its side-effects. */
6385 if (TREE_TYPE (val) == void_type_node)
6386 {
6387 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6388 is_gimple_stmt, fb_none);
6389 val = NULL;
6390 }
6391 else
6392 /* The temporary may not be an SSA name as later abnormal and EH
6393 control flow may invalidate use/def domination. When in SSA
6394 form then assume there are no such issues and SAVE_EXPRs only
6395 appear via GENERIC foldings. */
6396 val = get_initialized_tmp_var (val, pre_p, post_p,
6397 gimple_in_ssa_p (cfun));
6398
6399 TREE_OPERAND (*expr_p, 0) = val;
6400 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
6401 }
6402
6403 *expr_p = val;
6404
6405 return ret;
6406 }
6407
6408 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
6409
6410 unary_expr
6411 : ...
6412 | '&' varname
6413 ...
6414
6415 PRE_P points to the list where side effects that must happen before
6416 *EXPR_P should be stored.
6417
6418 POST_P points to the list where side effects that must happen after
6419 *EXPR_P should be stored. */
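/* E.g. "&*p" simplifies back to "p" (with a conversion if the qualifiers
   differ), and "&VIEW_CONVERT_EXPR<T>(x)" is rewritten as the address of
   X converted to the type of the ADDR_EXPR.  */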
6420
6421 static enum gimplify_status
6422 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6423 {
6424 tree expr = *expr_p;
6425 tree op0 = TREE_OPERAND (expr, 0);
6426 enum gimplify_status ret;
6427 location_t loc = EXPR_LOCATION (*expr_p);
6428
6429 switch (TREE_CODE (op0))
6430 {
6431 case INDIRECT_REF:
6432 do_indirect_ref:
6433 /* Check if we are dealing with an expression of the form '&*ptr'.
6434 While the front end folds away '&*ptr' into 'ptr', these
6435 expressions may be generated internally by the compiler (e.g.,
6436 builtins like __builtin_va_end). */
6437 /* Caution: the silent array decomposition semantics we allow for
6438 ADDR_EXPR means we can't always discard the pair. */
6439 /* Gimplification of the ADDR_EXPR operand may drop
6440 cv-qualification conversions, so make sure we add them if
6441 needed. */
6442 {
6443 tree op00 = TREE_OPERAND (op0, 0);
6444 tree t_expr = TREE_TYPE (expr);
6445 tree t_op00 = TREE_TYPE (op00);
6446
6447 if (!useless_type_conversion_p (t_expr, t_op00))
6448 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
6449 *expr_p = op00;
6450 ret = GS_OK;
6451 }
6452 break;
6453
6454 case VIEW_CONVERT_EXPR:
6455 /* Take the address of our operand and then convert it to the type of
6456 this ADDR_EXPR.
6457
6458 ??? The interactions of VIEW_CONVERT_EXPR and aliasing are not at
6459 all clear. The impact of this transformation is even less clear. */
6460
6461 /* If the operand is a useless conversion, look through it. Doing so
6462 guarantees that the ADDR_EXPR and its operand will remain of the
6463 same type. */
6464 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
6465 op0 = TREE_OPERAND (op0, 0);
6466
6467 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
6468 build_fold_addr_expr_loc (loc,
6469 TREE_OPERAND (op0, 0)));
6470 ret = GS_OK;
6471 break;
6472
6473 case MEM_REF:
6474 if (integer_zerop (TREE_OPERAND (op0, 1)))
6475 goto do_indirect_ref;
6476
6477 /* fall through */
6478
6479 default:
6480 /* If we see a call to a declared builtin or see its address
6481 being taken (we can unify those cases here) then we can mark
6482 the builtin for implicit generation by GCC. */
6483 if (TREE_CODE (op0) == FUNCTION_DECL
6484 && fndecl_built_in_p (op0, BUILT_IN_NORMAL)
6485 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
6486 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);
6487
6488 /* We use fb_either here because the C frontend sometimes takes
6489 the address of a call that returns a struct; see
6490 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
6491 the implied temporary explicit. */
6492
6493 /* Make the operand addressable. */
6494 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
6495 is_gimple_addressable, fb_either);
6496 if (ret == GS_ERROR)
6497 break;
6498
6499 /* Then mark it. Beware that it may not be possible to do so directly
6500 if a temporary has been created by the gimplification. */
6501 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
6502
6503 op0 = TREE_OPERAND (expr, 0);
6504
6505 /* For various reasons, the gimplification of the expression
6506 may have made a new INDIRECT_REF. */
6507 if (TREE_CODE (op0) == INDIRECT_REF
6508 || (TREE_CODE (op0) == MEM_REF
6509 && integer_zerop (TREE_OPERAND (op0, 1))))
6510 goto do_indirect_ref;
6511
6512 mark_addressable (TREE_OPERAND (expr, 0));
6513
6514 /* The FEs may end up building ADDR_EXPRs early on a decl with
6515 an incomplete type. Re-build ADDR_EXPRs in canonical form
6516 here. */
6517 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
6518 *expr_p = build_fold_addr_expr (op0);
6519
6520 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
6521 recompute_tree_invariant_for_addr_expr (*expr_p);
6522
6523 /* If we re-built the ADDR_EXPR add a conversion to the original type
6524 if required. */
6525 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
6526 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
6527
6528 break;
6529 }
6530
6531 return ret;
6532 }
6533
6534 /* Gimplify the operands of an ASM_EXPR. Input operands should be gimple
6535 values; output operands should be gimple lvalues. */
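/* E.g. for asm ("mov %1, %0" : "=r" (x) : "r" (y)), the output X is
   gimplified as an lvalue and the input Y to a gimple value before the
   GIMPLE_ASM is built.  */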
6536
6537 static enum gimplify_status
6538 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6539 {
6540 tree expr;
6541 int noutputs;
6542 const char **oconstraints;
6543 int i;
6544 tree link;
6545 const char *constraint;
6546 bool allows_mem, allows_reg, is_inout;
6547 enum gimplify_status ret, tret;
6548 gasm *stmt;
6549 vec<tree, va_gc> *inputs;
6550 vec<tree, va_gc> *outputs;
6551 vec<tree, va_gc> *clobbers;
6552 vec<tree, va_gc> *labels;
6553 tree link_next;
6554
6555 expr = *expr_p;
6556 noutputs = list_length (ASM_OUTPUTS (expr));
6557 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
6558
6559 inputs = NULL;
6560 outputs = NULL;
6561 clobbers = NULL;
6562 labels = NULL;
6563
6564 ret = GS_ALL_DONE;
6565 link_next = NULL_TREE;
6566 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
6567 {
6568 bool ok;
6569 size_t constraint_len;
6570
6571 link_next = TREE_CHAIN (link);
6572
6573 oconstraints[i]
6574 = constraint
6575 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6576 constraint_len = strlen (constraint);
6577 if (constraint_len == 0)
6578 continue;
6579
6580 ok = parse_output_constraint (&constraint, i, 0, 0,
6581 &allows_mem, &allows_reg, &is_inout);
6582 if (!ok)
6583 {
6584 ret = GS_ERROR;
6585 is_inout = false;
6586 }
6587
6588 /* If we can't make copies, we can only accept memory.
6589 Similarly for VLAs. */
6590 tree outtype = TREE_TYPE (TREE_VALUE (link));
6591 if (outtype != error_mark_node
6592 && (TREE_ADDRESSABLE (outtype)
6593 || !COMPLETE_TYPE_P (outtype)
6594 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (outtype))))
6595 {
6596 if (allows_mem)
6597 allows_reg = 0;
6598 else
6599 {
6600 error ("impossible constraint in %<asm%>");
6601 error ("non-memory output %d must stay in memory", i);
6602 return GS_ERROR;
6603 }
6604 }
6605
6606 if (!allows_reg && allows_mem)
6607 mark_addressable (TREE_VALUE (link));
6608
6609 tree orig = TREE_VALUE (link);
6610 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6611 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
6612 fb_lvalue | fb_mayfail);
6613 if (tret == GS_ERROR)
6614 {
6615 if (orig != error_mark_node)
6616 error ("invalid lvalue in %<asm%> output %d", i);
6617 ret = tret;
6618 }
6619
6620 /* If the constraint does not allow memory, make sure we gimplify
6621 the operand to a register if it is not one already but its base is.
6622 This happens for complex and vector components. */
6623 if (!allows_mem)
6624 {
6625 tree op = TREE_VALUE (link);
6626 if (! is_gimple_val (op)
6627 && is_gimple_reg_type (TREE_TYPE (op))
6628 && is_gimple_reg (get_base_address (op)))
6629 {
6630 tree tem = create_tmp_reg (TREE_TYPE (op));
6631 tree ass;
6632 if (is_inout)
6633 {
6634 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
6635 tem, unshare_expr (op));
6636 gimplify_and_add (ass, pre_p);
6637 }
6638 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
6639 gimplify_and_add (ass, post_p);
6640
6641 TREE_VALUE (link) = tem;
6642 tret = GS_OK;
6643 }
6644 }
6645
6646 vec_safe_push (outputs, link);
6647 TREE_CHAIN (link) = NULL_TREE;
6648
6649 if (is_inout)
6650 {
6651 /* An input/output operand. To give the optimizers more
6652 flexibility, split it into separate input and output
6653 operands. */
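/* E.g. "+r" (x) is rewritten as the output "=r" (x) plus a matching
   input "0" (x) that refers to the output by operand number.  */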
6654 tree input;
6655 /* Buffer big enough to format a 32-bit UINT_MAX into. */
6656 char buf[11];
6657
6658 /* Turn the in/out constraint into an output constraint. */
6659 char *p = xstrdup (constraint);
6660 p[0] = '=';
6661 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
6662
6663 /* And add a matching input constraint. */
6664 if (allows_reg)
6665 {
6666 sprintf (buf, "%u", i);
6667
6668 /* If there are multiple alternatives in the constraint,
6669 handle each of them individually. Those that allow a register
6670 will be replaced with the operand number; the others will stay
6671 unchanged. */
6672 if (strchr (p, ',') != NULL)
6673 {
6674 size_t len = 0, buflen = strlen (buf);
6675 char *beg, *end, *str, *dst;
6676
6677 for (beg = p + 1;;)
6678 {
6679 end = strchr (beg, ',');
6680 if (end == NULL)
6681 end = strchr (beg, '\0');
6682 if ((size_t) (end - beg) < buflen)
6683 len += buflen + 1;
6684 else
6685 len += end - beg + 1;
6686 if (*end)
6687 beg = end + 1;
6688 else
6689 break;
6690 }
6691
6692 str = (char *) alloca (len);
6693 for (beg = p + 1, dst = str;;)
6694 {
6695 const char *tem;
6696 bool mem_p, reg_p, inout_p;
6697
6698 end = strchr (beg, ',');
6699 if (end)
6700 *end = '\0';
6701 beg[-1] = '=';
6702 tem = beg - 1;
6703 parse_output_constraint (&tem, i, 0, 0,
6704 &mem_p, &reg_p, &inout_p);
6705 if (dst != str)
6706 *dst++ = ',';
6707 if (reg_p)
6708 {
6709 memcpy (dst, buf, buflen);
6710 dst += buflen;
6711 }
6712 else
6713 {
6714 if (end)
6715 len = end - beg;
6716 else
6717 len = strlen (beg);
6718 memcpy (dst, beg, len);
6719 dst += len;
6720 }
6721 if (end)
6722 beg = end + 1;
6723 else
6724 break;
6725 }
6726 *dst = '\0';
6727 input = build_string (dst - str, str);
6728 }
6729 else
6730 input = build_string (strlen (buf), buf);
6731 }
6732 else
6733 input = build_string (constraint_len - 1, constraint + 1);
6734
6735 free (p);
6736
6737 input = build_tree_list (build_tree_list (NULL_TREE, input),
6738 unshare_expr (TREE_VALUE (link)));
6739 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
6740 }
6741 }
6742
6743 link_next = NULL_TREE;
6744 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
6745 {
6746 link_next = TREE_CHAIN (link);
6747 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6748 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
6749 oconstraints, &allows_mem, &allows_reg);
6750
6751 /* If we can't make copies, we can only accept memory. */
6752 tree intype = TREE_TYPE (TREE_VALUE (link));
6753 if (intype != error_mark_node
6754 && (TREE_ADDRESSABLE (intype)
6755 || !COMPLETE_TYPE_P (intype)
6756 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (intype))))
6757 {
6758 if (allows_mem)
6759 allows_reg = 0;
6760 else
6761 {
6762 error ("impossible constraint in %<asm%>");
6763 error ("non-memory input %d must stay in memory", i);
6764 return GS_ERROR;
6765 }
6766 }
6767
6768 /* If the operand is a memory input, it should be an lvalue. */
6769 if (!allows_reg && allows_mem)
6770 {
6771 tree inputv = TREE_VALUE (link);
6772 STRIP_NOPS (inputv);
6773 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
6774 || TREE_CODE (inputv) == PREINCREMENT_EXPR
6775 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
6776 || TREE_CODE (inputv) == POSTINCREMENT_EXPR
6777 || TREE_CODE (inputv) == MODIFY_EXPR)
6778 TREE_VALUE (link) = error_mark_node;
6779 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6780 is_gimple_lvalue, fb_lvalue | fb_mayfail);
6781 if (tret != GS_ERROR)
6782 {
6783 /* Unlike output operands, memory inputs are not guaranteed
6784 to be lvalues by the FE, and while the expressions are
6785 marked addressable there, if it is e.g. a statement
6786 expression, temporaries in it might not end up being
6787 addressable. They might already be used in the IL and thus
6788 it is too late to make them addressable now though. */
6789 tree x = TREE_VALUE (link);
6790 while (handled_component_p (x))
6791 x = TREE_OPERAND (x, 0);
6792 if (TREE_CODE (x) == MEM_REF
6793 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
6794 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
6795 if ((VAR_P (x)
6796 || TREE_CODE (x) == PARM_DECL
6797 || TREE_CODE (x) == RESULT_DECL)
6798 && !TREE_ADDRESSABLE (x)
6799 && is_gimple_reg (x))
6800 {
6801 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
6802 input_location), 0,
6803 "memory input %d is not directly addressable",
6804 i);
6805 prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
6806 }
6807 }
6808 mark_addressable (TREE_VALUE (link));
6809 if (tret == GS_ERROR)
6810 {
6811 if (inputv != error_mark_node)
6812 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
6813 "memory input %d is not directly addressable", i);
6814 ret = tret;
6815 }
6816 }
6817 else
6818 {
6819 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6820 is_gimple_asm_val, fb_rvalue);
6821 if (tret == GS_ERROR)
6822 ret = tret;
6823 }
6824
6825 TREE_CHAIN (link) = NULL_TREE;
6826 vec_safe_push (inputs, link);
6827 }
6828
6829 link_next = NULL_TREE;
6830 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
6831 {
6832 link_next = TREE_CHAIN (link);
6833 TREE_CHAIN (link) = NULL_TREE;
6834 vec_safe_push (clobbers, link);
6835 }
6836
6837 link_next = NULL_TREE;
6838 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
6839 {
6840 link_next = TREE_CHAIN (link);
6841 TREE_CHAIN (link) = NULL_TREE;
6842 vec_safe_push (labels, link);
6843 }
6844
6845 /* Do not add ASMs with errors to the gimple IL stream. */
6846 if (ret != GS_ERROR)
6847 {
6848 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
6849 inputs, outputs, clobbers, labels);
6850
6851 /* asm is volatile if it was marked by the user as volatile or
6852 there are no outputs or this is an asm goto. */
6853 gimple_asm_set_volatile (stmt,
6854 ASM_VOLATILE_P (expr)
6855 || noutputs == 0
6856 || labels);
6857 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
6858 gimple_asm_set_inline (stmt, ASM_INLINE_P (expr));
6859
6860 gimplify_seq_add_stmt (pre_p, stmt);
6861 }
6862
6863 return ret;
6864 }
6865
6866 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
6867 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6868 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
6869 return to this function.
6870
6871 FIXME should we complexify the prequeue handling instead? Or use flags
6872 for all the cleanups and let the optimizer tighten them up? The current
6873 code seems pretty fragile; it will break on a cleanup within any
6874 non-conditional nesting. But any such nesting would be broken, anyway;
6875 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
6876 and continues out of it. We can do that at the RTL level, though, so
6877 having an optimizer to tighten up try/finally regions would be a Good
6878 Thing. */
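/* For example, a gimplified body of the form

     a; WCE <cleanup>; b; c;

   is rewritten below into

     a; try { b; c; } finally { cleanup; }

   whereas a WCE that ends the sequence simply has its cleanup emitted
   in place.  */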
6879
6880 static enum gimplify_status
6881 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
6882 {
6883 gimple_stmt_iterator iter;
6884 gimple_seq body_sequence = NULL;
6885
6886 tree temp = voidify_wrapper_expr (*expr_p, NULL);
6887
6888 /* We only care about the number of conditions between the innermost
6889 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
6890 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6891 int old_conds = gimplify_ctxp->conditions;
6892 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
6893 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
6894 gimplify_ctxp->conditions = 0;
6895 gimplify_ctxp->conditional_cleanups = NULL;
6896 gimplify_ctxp->in_cleanup_point_expr = true;
6897
6898 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
6899
6900 gimplify_ctxp->conditions = old_conds;
6901 gimplify_ctxp->conditional_cleanups = old_cleanups;
6902 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
6903
6904 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
6905 {
6906 gimple *wce = gsi_stmt (iter);
6907
6908 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
6909 {
6910 if (gsi_one_before_end_p (iter))
6911 {
6912 /* Note that gsi_insert_seq_before and gsi_remove do not
6913 scan operands, unlike some other sequence mutators. */
6914 if (!gimple_wce_cleanup_eh_only (wce))
6915 gsi_insert_seq_before_without_update (&iter,
6916 gimple_wce_cleanup (wce),
6917 GSI_SAME_STMT);
6918 gsi_remove (&iter, true);
6919 break;
6920 }
6921 else
6922 {
6923 gtry *gtry;
6924 gimple_seq seq;
6925 enum gimple_try_flags kind;
6926
6927 if (gimple_wce_cleanup_eh_only (wce))
6928 kind = GIMPLE_TRY_CATCH;
6929 else
6930 kind = GIMPLE_TRY_FINALLY;
6931 seq = gsi_split_seq_after (iter);
6932
6933 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
6934 /* Do not use gsi_replace here, as it may scan operands.
6935 We want to do a simple structural modification only. */
6936 gsi_set_stmt (&iter, gtry);
6937 iter = gsi_start (gtry->eval);
6938 }
6939 }
6940 else
6941 gsi_next (&iter);
6942 }
6943
6944 gimplify_seq_add_seq (pre_p, body_sequence);
6945 if (temp)
6946 {
6947 *expr_p = temp;
6948 return GS_OK;
6949 }
6950 else
6951 {
6952 *expr_p = NULL;
6953 return GS_ALL_DONE;
6954 }
6955 }
6956
6957 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
6958 is the cleanup action required. EH_ONLY is true if the cleanup should
6959 only be executed if an exception is thrown, not on normal exit.
6960 If FORCE_UNCOND is true perform the cleanup unconditionally; this is
6961 only valid for clobbers. */
6962
6963 static void
6964 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
6965 bool force_uncond = false)
6966 {
6967 gimple *wce;
6968 gimple_seq cleanup_stmts = NULL;
6969
6970 /* Errors can result in improperly nested cleanups, which causes
6971 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
6972 if (seen_error ())
6973 return;
6974
6975 if (gimple_conditional_context ())
6976 {
6977 /* If we're in a conditional context, this is more complex. We only
6978 want to run the cleanup if we actually ran the initialization that
6979 necessitates it, but we want to run it after the end of the
6980 conditional context. So we wrap the try/finally around the
6981 condition and use a flag to determine whether or not to actually
6982 run the destructor. Thus
6983
6984 test ? f(A()) : 0
6985
6986 becomes (approximately)
6987
6988 flag = 0;
6989 try {
6990 if (test) { A::A(temp); flag = 1; val = f(temp); }
6991 else { val = 0; }
6992 } finally {
6993 if (flag) A::~A(temp);
6994 }
6995 val
6996 */
6997 if (force_uncond)
6998 {
6999 gimplify_stmt (&cleanup, &cleanup_stmts);
7000 wce = gimple_build_wce (cleanup_stmts);
7001 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
7002 }
7003 else
7004 {
7005 tree flag = create_tmp_var (boolean_type_node, "cleanup");
7006 gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
7007 gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
7008
7009 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
7010 gimplify_stmt (&cleanup, &cleanup_stmts);
7011 wce = gimple_build_wce (cleanup_stmts);
7012 gimple_wce_set_cleanup_eh_only (wce, eh_only);
7013
7014 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
7015 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
7016 gimplify_seq_add_stmt (pre_p, ftrue);
7017
7018 /* Because of this manipulation, and the EH edges that jump
7019 threading cannot redirect, the temporary (VAR) will appear
7020 to be used uninitialized. Don't warn. */
7021 suppress_warning (var, OPT_Wuninitialized);
7022 }
7023 }
7024 else
7025 {
7026 gimplify_stmt (&cleanup, &cleanup_stmts);
7027 wce = gimple_build_wce (cleanup_stmts);
7028 gimple_wce_set_cleanup_eh_only (wce, eh_only);
7029 gimplify_seq_add_stmt (pre_p, wce);
7030 }
7031 }
7032
7033 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
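/* That is, TARGET_EXPR <D.1234, init, cleanup> (with D.1234 standing
   for the temporary slot) is lowered by emitting the gimplified
   "D.1234 = init" statements into *PRE_P, pushing the cleanup (and,
   under -fstack-reuse=all, a clobber) for the slot, and replacing the
   whole expression with the slot D.1234 itself.  */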
7034
7035 static enum gimplify_status
7036 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
7037 {
7038 tree targ = *expr_p;
7039 tree temp = TARGET_EXPR_SLOT (targ);
7040 tree init = TARGET_EXPR_INITIAL (targ);
7041 enum gimplify_status ret;
7042
7043 bool unpoison_empty_seq = false;
7044 gimple_stmt_iterator unpoison_it;
7045
7046 if (init)
7047 {
7048 gimple_seq init_pre_p = NULL;
7049
7050 /* TARGET_EXPR temps aren't part of the enclosing block, so add the
7051 temporary to the temps list. Also handle variable-length TARGET_EXPRs. */
7052 if (!poly_int_tree_p (DECL_SIZE (temp)))
7053 {
7054 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
7055 gimplify_type_sizes (TREE_TYPE (temp), &init_pre_p);
7056 /* FIXME: this is correct only when the size of the type does
7057 not depend on expressions evaluated in init. */
7058 gimplify_vla_decl (temp, &init_pre_p);
7059 }
7060 else
7061 {
7062 /* Save the location where we need to place the unpoisoning. It's
7063 possible that the variable will later satisfy needs_to_live_in_memory. */
7064 unpoison_it = gsi_last (*pre_p);
7065 unpoison_empty_seq = gsi_end_p (unpoison_it);
7066
7067 gimple_add_tmp_var (temp);
7068 }
7069
7070 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
7071 expression is supposed to initialize the slot. */
7072 if (VOID_TYPE_P (TREE_TYPE (init)))
7073 ret = gimplify_expr (&init, &init_pre_p, post_p, is_gimple_stmt,
7074 fb_none);
7075 else
7076 {
7077 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
7078 init = init_expr;
7079 ret = gimplify_expr (&init, &init_pre_p, post_p, is_gimple_stmt,
7080 fb_none);
7081 init = NULL;
7082 ggc_free (init_expr);
7083 }
7084 if (ret == GS_ERROR)
7085 {
7086 /* PR c++/28266 Make sure this is expanded only once. */
7087 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
7088 return GS_ERROR;
7089 }
7090
7091 if (init)
7092 gimplify_and_add (init, &init_pre_p);
7093
7094 /* Add a clobber for the temporary going out of scope, like
7095 gimplify_bind_expr. */
7096 if (gimplify_ctxp->in_cleanup_point_expr
7097 && needs_to_live_in_memory (temp))
7098 {
7099 if (flag_stack_reuse == SR_ALL)
7100 {
7101 tree clobber = build_clobber (TREE_TYPE (temp), CLOBBER_EOL);
7102 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
7103 gimple_push_cleanup (temp, clobber, false, pre_p, true);
7104 }
7105 if (asan_poisoned_variables
7106 && DECL_ALIGN (temp) <= MAX_SUPPORTED_STACK_ALIGNMENT
7107 && !TREE_STATIC (temp)
7108 && dbg_cnt (asan_use_after_scope)
7109 && !gimplify_omp_ctxp)
7110 {
7111 tree asan_cleanup = build_asan_poison_call_expr (temp);
7112 if (asan_cleanup)
7113 {
7114 if (unpoison_empty_seq)
7115 unpoison_it = gsi_start (*pre_p);
7116
7117 asan_poison_variable (temp, false, &unpoison_it,
7118 unpoison_empty_seq);
7119 gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
7120 }
7121 }
7122 }
7123
7124 gimple_seq_add_seq (pre_p, init_pre_p);
7125
7126 /* If needed, push the cleanup for the temp. */
7127 if (TARGET_EXPR_CLEANUP (targ))
7128 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
7129 CLEANUP_EH_ONLY (targ), pre_p);
7130
7131 /* Only expand this once. */
7132 TREE_OPERAND (targ, 3) = init;
7133 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
7134 }
7135 else
7136 /* We should have expanded this before. */
7137 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
7138
7139 *expr_p = temp;
7140 return GS_OK;
7141 }
7142
7143 /* Gimplification of expression trees. */
7144
7145 /* Gimplify an expression which appears at statement context. The
7146 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
7147 NULL, a new sequence is allocated.
7148
7149 Return true if we actually added a statement to the queue. */
7150
7151 bool
7152 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
7153 {
7154 gimple_seq_node last;
7155
7156 last = gimple_seq_last (*seq_p);
7157 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
7158 return last != gimple_seq_last (*seq_p);
7159 }
7160
7161 /* Add FIRSTPRIVATE entries for DECL to CTX and the surrounding OpenMP
7162 parallel contexts. If entries already exist, force them to be some flavor
7163 of private. If there is no enclosing parallel, do nothing. */
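/* For example, when a VLA "int a[n]" is privatized, the gimplified
   size expressions referring to "n" must themselves be firstprivate
   in the surrounding parallels so that each thread can allocate its
   own copy.  */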
7164
7165 void
7166 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
7167 {
7168 splay_tree_node n;
7169
7170 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
7171 return;
7172
7173 do
7174 {
7175 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7176 if (n != NULL)
7177 {
7178 if (n->value & GOVD_SHARED)
7179 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
7180 else if (n->value & GOVD_MAP)
7181 n->value |= GOVD_MAP_TO_ONLY;
7182 else
7183 return;
7184 }
7185 else if ((ctx->region_type & ORT_TARGET) != 0)
7186 {
7187 if (ctx->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
7188 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
7189 else
7190 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
7191 }
7192 else if (ctx->region_type != ORT_WORKSHARE
7193 && ctx->region_type != ORT_TASKGROUP
7194 && ctx->region_type != ORT_SIMD
7195 && ctx->region_type != ORT_ACC
7196 && !(ctx->region_type & ORT_TARGET_DATA))
7197 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
7198
7199 ctx = ctx->outer_context;
7200 }
7201 while (ctx);
7202 }
7203
7204 /* Similarly for each of the type sizes of TYPE. */
7205
7206 static void
7207 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
7208 {
7209 if (type == NULL || type == error_mark_node)
7210 return;
7211 type = TYPE_MAIN_VARIANT (type);
7212
7213 if (ctx->privatized_types->add (type))
7214 return;
7215
7216 switch (TREE_CODE (type))
7217 {
7218 case INTEGER_TYPE:
7219 case ENUMERAL_TYPE:
7220 case BOOLEAN_TYPE:
7221 case REAL_TYPE:
7222 case FIXED_POINT_TYPE:
7223 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
7224 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
7225 break;
7226
7227 case ARRAY_TYPE:
7228 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
7229 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
7230 break;
7231
7232 case RECORD_TYPE:
7233 case UNION_TYPE:
7234 case QUAL_UNION_TYPE:
7235 {
7236 tree field;
7237 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
7238 if (TREE_CODE (field) == FIELD_DECL)
7239 {
7240 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
7241 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
7242 }
7243 }
7244 break;
7245
7246 case POINTER_TYPE:
7247 case REFERENCE_TYPE:
7248 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
7249 break;
7250
7251 default:
7252 break;
7253 }
7254
7255 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
7256 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
7257 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
7258 }
7259
7260 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
7261
7262 static void
7263 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
7264 {
7265 splay_tree_node n;
7266 unsigned int nflags;
7267 tree t;
7268
7269 if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
7270 return;
7271
7272 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
7273 there are constructors involved somewhere. The exception is a shared
7274 clause, where nothing is privatized. */
7275 if ((flags & GOVD_SHARED) == 0
7276 && (TREE_ADDRESSABLE (TREE_TYPE (decl))
7277 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
7278 flags |= GOVD_SEEN;
7279
7280 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7281 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7282 {
7283 /* We shouldn't be re-adding the decl with the same data
7284 sharing class. */
7285 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
7286 nflags = n->value | flags;
7287 /* The only combination of data sharing classes we should see is
7288 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
7289 reduction variables to be used in data sharing clauses. */
7290 gcc_assert ((ctx->region_type & ORT_ACC) != 0
7291 || ((nflags & GOVD_DATA_SHARE_CLASS)
7292 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
7293 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
7294 n->value = nflags;
7295 return;
7296 }
7297
7298 /* When adding a variable-sized variable, we have to handle all sorts
7299 of additional bits of data: the pointer replacement variable, and
7300 the parameters of the type. */
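/* For such a decl, DECL_VALUE_EXPR is an INDIRECT_REF of the pointer
   replacement variable (asserted below); it is that pointer which is
   remapped here, while the size expressions become firstprivate.  */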
7301 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7302 {
7303 /* Add the pointer replacement variable as PRIVATE if the variable
7304 replacement is private, else FIRSTPRIVATE since we'll need the
7305 address of the original variable either for SHARED, or for the
7306 copy into or out of the context. */
7307 if (!(flags & GOVD_LOCAL) && ctx->region_type != ORT_TASKGROUP)
7308 {
7309 if (flags & GOVD_MAP)
7310 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
7311 else if (flags & GOVD_PRIVATE)
7312 nflags = GOVD_PRIVATE;
7313 else if (((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
7314 && (flags & GOVD_FIRSTPRIVATE))
7315 || (ctx->region_type == ORT_TARGET_DATA
7316 && (flags & GOVD_DATA_SHARE_CLASS) == 0))
7317 nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
7318 else
7319 nflags = GOVD_FIRSTPRIVATE;
7320 nflags |= flags & GOVD_SEEN;
7321 t = DECL_VALUE_EXPR (decl);
7322 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
7323 t = TREE_OPERAND (t, 0);
7324 gcc_assert (DECL_P (t));
7325 omp_add_variable (ctx, t, nflags);
7326 }
7327
7328 /* Add all of the variable and type parameters (which should have
7329 been gimplified to a formal temporary) as FIRSTPRIVATE. */
7330 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
7331 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
7332 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
7333
7334 /* The variable-sized variable itself is never SHARED, only some form
7335 of PRIVATE. The sharing would take place via the pointer variable
7336 which we remapped above. */
7337 if (flags & GOVD_SHARED)
7338 flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
7339 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
7340
7341 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
7342 alloca statement we generate for the variable, so make sure it
7343 is available. This isn't automatically needed for the SHARED
7344 case, since we won't be allocating local storage then.
7345 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
7346 in this case omp_notice_variable will be called later
7347 on when it is gimplified. */
7348 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
7349 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
7350 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
7351 }
7352 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
7353 && omp_privatize_by_reference (decl))
7354 {
7355 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
7356
7357 /* Similar to the direct variable sized case above, we'll need the
7358 size of references being privatized. */
7359 if ((flags & GOVD_SHARED) == 0)
7360 {
7361 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7362 if (t && DECL_P (t))
7363 omp_notice_variable (ctx, t, true);
7364 }
7365 }
7366
7367 if (n != NULL)
7368 n->value |= flags;
7369 else
7370 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
7371
7372 /* For reductions clauses in OpenACC loop directives, by default create a
7373 copy clause on the enclosing parallel construct for carrying back the
7374 results. */
7375 if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
7376 {
7377 struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
7378 while (outer_ctx)
7379 {
7380 n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
7381 if (n != NULL)
7382 {
7383 /* Ignore local variables and explicitly declared clauses. */
7384 if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
7385 break;
7386 else if (outer_ctx->region_type == ORT_ACC_KERNELS)
7387 {
7388 /* According to the OpenACC spec, such a reduction variable
7389 should already have a copy map on a kernels construct,
7390 verify that here. */
7391 gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
7392 && (n->value & GOVD_MAP));
7393 }
7394 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
7395 {
7396 /* Remove firstprivate and make it a copy map. */
7397 n->value &= ~GOVD_FIRSTPRIVATE;
7398 n->value |= GOVD_MAP;
7399 }
7400 }
7401 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
7402 {
7403 splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
7404 GOVD_MAP | GOVD_SEEN);
7405 break;
7406 }
7407 outer_ctx = outer_ctx->outer_context;
7408 }
7409 }
7410 }
7411
7412 /* Notice a threadprivate variable DECL used in OMP context CTX.
7413 This just prints out diagnostics about threadprivate variable uses
7414 in untied tasks, target regions and regions with an order(concurrent)
7415 clause. If DECL2 is non-NULL, prevent this warning on that variable. */
7416
7417 static bool
7418 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
7419 tree decl2)
7420 {
7421 splay_tree_node n;
7422 struct gimplify_omp_ctx *octx;
7423
7424 for (octx = ctx; octx; octx = octx->outer_context)
7425 if ((octx->region_type & ORT_TARGET) != 0
7426 || octx->order_concurrent)
7427 {
7428 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
7429 if (n == NULL)
7430 {
7431 if (octx->order_concurrent)
7432 {
7433 error ("threadprivate variable %qE used in a region with"
7434 " %<order(concurrent)%> clause", DECL_NAME (decl));
7435 inform (octx->location, "enclosing region");
7436 }
7437 else
7438 {
7439 error ("threadprivate variable %qE used in target region",
7440 DECL_NAME (decl));
7441 inform (octx->location, "enclosing target region");
7442 }
7443 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
7444 }
7445 if (decl2)
7446 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
7447 }
7448
7449 if (ctx->region_type != ORT_UNTIED_TASK)
7450 return false;
7451 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7452 if (n == NULL)
7453 {
7454 error ("threadprivate variable %qE used in untied task",
7455 DECL_NAME (decl));
7456 inform (ctx->location, "enclosing task");
7457 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
7458 }
7459 if (decl2)
7460 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
7461 return false;
7462 }
7463
7464 /* Return true if global var DECL is device resident. */
7465
7466 static bool
7467 device_resident_p (tree decl)
7468 {
7469 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
7470
7471 if (!attr)
7472 return false;
7473
7474 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
7475 {
7476 tree c = TREE_VALUE (t);
7477 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
7478 return true;
7479 }
7480
7481 return false;
7482 }
7483
7484 /* Return true if DECL has an ACC DECLARE attribute. */
7485
7486 static bool
7487 is_oacc_declared (tree decl)
7488 {
7489 tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
7490 tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
7491 return declared != NULL_TREE;
7492 }
7493
7494 /* Determine outer default flags for DECL mentioned in an OMP region
7495 but not declared in an enclosing clause.
7496
7497 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
7498 remapped firstprivate instead of shared. To some extent this is
7499 addressed in omp_firstprivatize_type_sizes, but not
7500 effectively. */
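/* Roughly: under default(none), a reference to an unlisted variable
   triggers the "not specified in enclosing" error below; with no
   default clause on a task, locals and parameters of the current
   function become firstprivate and other variables shared, unless an
   outer context already determines otherwise.  */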
7501
7502 static unsigned
7503 omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
7504 bool in_code, unsigned flags)
7505 {
7506 enum omp_clause_default_kind default_kind = ctx->default_kind;
7507 enum omp_clause_default_kind kind;
7508
7509 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
7510 if (ctx->region_type & ORT_TASK)
7511 {
7512 tree detach_clause = omp_find_clause (ctx->clauses, OMP_CLAUSE_DETACH);
7513
7514 /* The event-handle specified by a detach clause should always be firstprivate,
7515 regardless of the current default. */
7516 if (detach_clause && OMP_CLAUSE_DECL (detach_clause) == decl)
7517 kind = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
7518 }
7519 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
7520 default_kind = kind;
7521 else if (VAR_P (decl) && TREE_STATIC (decl) && DECL_IN_CONSTANT_POOL (decl))
7522 default_kind = OMP_CLAUSE_DEFAULT_SHARED;
7523 /* For C/C++ default({,first}private), variables with static storage duration
7524 declared in a namespace or global scope and referenced in the construct
7525 must be explicitly specified, i.e. the default acts as default(none). */
7526 else if ((default_kind == OMP_CLAUSE_DEFAULT_PRIVATE
7527 || default_kind == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
7528 && VAR_P (decl)
7529 && is_global_var (decl)
7530 && (DECL_FILE_SCOPE_P (decl)
7531 || (DECL_CONTEXT (decl)
7532 && TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL))
7533 && !lang_GNU_Fortran ())
7534 default_kind = OMP_CLAUSE_DEFAULT_NONE;
7535
7536 switch (default_kind)
7537 {
7538 case OMP_CLAUSE_DEFAULT_NONE:
7539 {
7540 const char *rtype;
7541
7542 if (ctx->region_type & ORT_PARALLEL)
7543 rtype = "parallel";
7544 else if ((ctx->region_type & ORT_TASKLOOP) == ORT_TASKLOOP)
7545 rtype = "taskloop";
7546 else if (ctx->region_type & ORT_TASK)
7547 rtype = "task";
7548 else if (ctx->region_type & ORT_TEAMS)
7549 rtype = "teams";
7550 else
7551 gcc_unreachable ();
7552
7553 error ("%qE not specified in enclosing %qs",
7554 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
7555 inform (ctx->location, "enclosing %qs", rtype);
7556 }
7557 /* FALLTHRU */
7558 case OMP_CLAUSE_DEFAULT_SHARED:
7559 flags |= GOVD_SHARED;
7560 break;
7561 case OMP_CLAUSE_DEFAULT_PRIVATE:
7562 flags |= GOVD_PRIVATE;
7563 break;
7564 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
7565 flags |= GOVD_FIRSTPRIVATE;
7566 break;
7567 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
7568 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
7569 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
7570 if (struct gimplify_omp_ctx *octx = ctx->outer_context)
7571 {
7572 omp_notice_variable (octx, decl, in_code);
7573 for (; octx; octx = octx->outer_context)
7574 {
7575 splay_tree_node n2;
7576
7577 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
7578 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
7579 && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
7580 continue;
7581 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
7582 {
7583 flags |= GOVD_FIRSTPRIVATE;
7584 goto found_outer;
7585 }
7586 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
7587 {
7588 flags |= GOVD_SHARED;
7589 goto found_outer;
7590 }
7591 }
7592 }
7593
7594 if (TREE_CODE (decl) == PARM_DECL
7595 || (!is_global_var (decl)
7596 && DECL_CONTEXT (decl) == current_function_decl))
7597 flags |= GOVD_FIRSTPRIVATE;
7598 else
7599 flags |= GOVD_SHARED;
7600 found_outer:
7601 break;
7602
7603 default:
7604 gcc_unreachable ();
7605 }
7606
7607 return flags;
7608 }
7609
7610
7611 /* Determine outer default flags for DECL mentioned in an OACC region
7612 but not declared in an enclosing clause. */
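/* In short, per the switch below: on kernels constructs, aggregates
   default to a present_or_copy (or present) map and scalars to a
   forced copy map; on parallel and serial constructs, aggregates map
   likewise while scalars default to firstprivate.  */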
7613
7614 static unsigned
7615 oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
7616 {
7617 const char *rkind;
7618 bool on_device = false;
7619 bool is_private = false;
7620 bool declared = is_oacc_declared (decl);
7621 tree type = TREE_TYPE (decl);
7622
7623 if (omp_privatize_by_reference (decl))
7624 type = TREE_TYPE (type);
7625
7626 /* For Fortran COMMON blocks, only the variables used in those blocks are
7627 transferred and remapped. The block itself will have a private clause to
7628 avoid transferring the data twice.
7629 The hook evaluates to false by default. For a variable in Fortran's COMMON
7630 or EQUIVALENCE block, it returns 'true' (as we have shared=false) - as only
7631 the variables in such a COMMON/EQUIVALENCE block shall be privatized, not
7632 the whole block. For C++ and Fortran, it can also be true under certain
7633 other conditions, if DECL_HAS_VALUE_EXPR. */
7634 if (RECORD_OR_UNION_TYPE_P (type))
7635 is_private = lang_hooks.decls.omp_disregard_value_expr (decl, false);
7636
7637 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
7638 && is_global_var (decl)
7639 && device_resident_p (decl)
7640 && !is_private)
7641 {
7642 on_device = true;
7643 flags |= GOVD_MAP_TO_ONLY;
7644 }
7645
7646 switch (ctx->region_type)
7647 {
7648 case ORT_ACC_KERNELS:
7649 rkind = "kernels";
7650
7651 if (is_private)
7652 flags |= GOVD_FIRSTPRIVATE;
7653 else if (AGGREGATE_TYPE_P (type))
7654 {
7655 /* Aggregates default to 'present_or_copy', or 'present'. */
7656 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7657 flags |= GOVD_MAP;
7658 else
7659 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7660 }
7661 else
7662 /* Scalars default to 'copy'. */
7663 flags |= GOVD_MAP | GOVD_MAP_FORCE;
7664
7665 break;
7666
7667 case ORT_ACC_PARALLEL:
7668 case ORT_ACC_SERIAL:
7669 rkind = ctx->region_type == ORT_ACC_PARALLEL ? "parallel" : "serial";
7670
7671 if (is_private)
7672 flags |= GOVD_FIRSTPRIVATE;
7673 else if (on_device || declared)
7674 flags |= GOVD_MAP;
7675 else if (AGGREGATE_TYPE_P (type))
7676 {
7677 /* Aggregates default to 'present_or_copy', or 'present'. */
7678 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7679 flags |= GOVD_MAP;
7680 else
7681 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7682 }
7683 else
7684 /* Scalars default to 'firstprivate'. */
7685 flags |= GOVD_FIRSTPRIVATE;
7686
7687 break;
7688
7689 default:
7690 gcc_unreachable ();
7691 }
7692
7693 if (DECL_ARTIFICIAL (decl))
7694 ; /* We can get compiler-generated decls, and should not complain
7695 about them. */
7696 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
7697 {
7698 error ("%qE not specified in enclosing OpenACC %qs construct",
7699 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
7700 inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
7701 }
7702 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
7703 ; /* Handled above. */
7704 else
7705 gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
7706
7707 return flags;
7708 }
7709
7710 /* Record the fact that DECL was used within the OMP context CTX.
7711 IN_CODE is true when real code uses DECL, and false when we should
7712 merely emit default(none) errors. Return true if DECL is going to
7713 be remapped and thus DECL shouldn't be gimplified into its
7714 DECL_VALUE_EXPR (if any). */
7715
7716 static bool
7717 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
7718 {
7719 splay_tree_node n;
7720 unsigned flags = in_code ? GOVD_SEEN : 0;
7721 bool ret = false, shared;
7722
7723 if (error_operand_p (decl))
7724 return false;
7725
7726 if (ctx->region_type == ORT_NONE)
7727 return lang_hooks.decls.omp_disregard_value_expr (decl, false);
7728
7729 if (is_global_var (decl))
7730 {
7731 /* Threadprivate variables are predetermined. */
7732 if (DECL_THREAD_LOCAL_P (decl))
7733 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
7734
7735 if (DECL_HAS_VALUE_EXPR_P (decl))
7736 {
7737 if (ctx->region_type & ORT_ACC)
7738 /* For OpenACC, defer expansion of the value to avoid transferring
7739 privatized common block data instead of the im-/explicitly transferred
7740 variables which are in common blocks. */
7741 ;
7742 else
7743 {
7744 tree value = get_base_address (DECL_VALUE_EXPR (decl));
7745
7746 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
7747 return omp_notice_threadprivate_variable (ctx, decl, value);
7748 }
7749 }
7750
7751 if (gimplify_omp_ctxp->outer_context == NULL
7752 && VAR_P (decl)
7753 && oacc_get_fn_attrib (current_function_decl))
7754 {
7755 location_t loc = DECL_SOURCE_LOCATION (decl);
7756
7757 if (lookup_attribute ("omp declare target link",
7758 DECL_ATTRIBUTES (decl)))
7759 {
7760 error_at (loc,
7761 "%qE with %<link%> clause used in %<routine%> function",
7762 DECL_NAME (decl));
7763 return false;
7764 }
7765 else if (!lookup_attribute ("omp declare target",
7766 DECL_ATTRIBUTES (decl)))
7767 {
7768 error_at (loc,
7769 "%qE requires a %<declare%> directive for use "
7770 "in a %<routine%> function", DECL_NAME (decl));
7771 return false;
7772 }
7773 }
7774 }
7775
7776 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7777 if ((ctx->region_type & ORT_TARGET) != 0)
7778 {
7779 if (ctx->region_type & ORT_ACC)
7780 /* For OpenACC, as remarked above, defer expansion. */
7781 shared = false;
7782 else
7783 shared = true;
7784
7785 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7786 if (n == NULL)
7787 {
7788 unsigned nflags = flags;
7789 if ((ctx->region_type & ORT_ACC) == 0)
7790 {
7791 bool is_declare_target = false;
7792 if (is_global_var (decl)
7793 && varpool_node::get_create (decl)->offloadable)
7794 {
7795 struct gimplify_omp_ctx *octx;
7796 for (octx = ctx->outer_context;
7797 octx; octx = octx->outer_context)
7798 {
7799 n = splay_tree_lookup (octx->variables,
7800 (splay_tree_key)decl);
7801 if (n
7802 && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
7803 && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7804 break;
7805 }
7806 is_declare_target = octx == NULL;
7807 }
7808 if (!is_declare_target)
7809 {
7810 int gdmk;
7811 enum omp_clause_defaultmap_kind kind;
7812 if (lang_hooks.decls.omp_allocatable_p (decl))
7813 gdmk = GDMK_ALLOCATABLE;
7814 else if (lang_hooks.decls.omp_scalar_target_p (decl))
7815 gdmk = GDMK_SCALAR_TARGET;
7816 else if (lang_hooks.decls.omp_scalar_p (decl, false))
7817 gdmk = GDMK_SCALAR;
7818 else if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
7819 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
7820 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
7821 == POINTER_TYPE)))
7822 gdmk = GDMK_POINTER;
7823 else
7824 gdmk = GDMK_AGGREGATE;
7825 kind = lang_hooks.decls.omp_predetermined_mapping (decl);
7826 if (kind != OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED)
7827 {
7828 if (kind == OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE)
7829 nflags |= GOVD_FIRSTPRIVATE;
7830 else if (kind == OMP_CLAUSE_DEFAULTMAP_TO)
7831 nflags |= GOVD_MAP | GOVD_MAP_TO_ONLY;
7832 else
7833 gcc_unreachable ();
7834 }
7835 else if (ctx->defaultmap[gdmk] == 0)
7836 {
7837 tree d = lang_hooks.decls.omp_report_decl (decl);
7838 error ("%qE not specified in enclosing %<target%>",
7839 DECL_NAME (d));
7840 inform (ctx->location, "enclosing %<target%>");
7841 }
7842 else if (ctx->defaultmap[gdmk]
7843 & (GOVD_MAP_0LEN_ARRAY | GOVD_FIRSTPRIVATE))
7844 nflags |= ctx->defaultmap[gdmk];
7845 else
7846 {
7847 gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP);
7848 nflags |= ctx->defaultmap[gdmk] & ~GOVD_MAP;
7849 }
7850 }
7851 }
7852
7853 struct gimplify_omp_ctx *octx = ctx->outer_context;
7854 if ((ctx->region_type & ORT_ACC) && octx)
7855 {
7856 /* Look in outer OpenACC contexts, to see if there's a
7857 data attribute for this variable. */
7858 omp_notice_variable (octx, decl, in_code);
7859
7860 for (; octx; octx = octx->outer_context)
7861 {
7862 if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
7863 break;
7864 splay_tree_node n2
7865 = splay_tree_lookup (octx->variables,
7866 (splay_tree_key) decl);
7867 if (n2)
7868 {
7869 if (octx->region_type == ORT_ACC_HOST_DATA)
7870 error ("variable %qE declared in enclosing "
7871 "%<host_data%> region", DECL_NAME (decl));
7872 nflags |= GOVD_MAP;
7873 if (octx->region_type == ORT_ACC_DATA
7874 && (n2->value & GOVD_MAP_0LEN_ARRAY))
7875 nflags |= GOVD_MAP_0LEN_ARRAY;
7876 goto found_outer;
7877 }
7878 }
7879 }
7880
7881 if ((nflags & ~(GOVD_MAP_TO_ONLY | GOVD_MAP_FROM_ONLY
7882 | GOVD_MAP_ALLOC_ONLY)) == flags)
7883 {
7884 tree type = TREE_TYPE (decl);
7885
7886 if (gimplify_omp_ctxp->target_firstprivatize_array_bases
7887 && omp_privatize_by_reference (decl))
7888 type = TREE_TYPE (type);
7889 if (!lang_hooks.types.omp_mappable_type (type))
7890 {
7891 error ("%qD referenced in target region does not have "
7892 "a mappable type", decl);
7893 nflags |= GOVD_MAP | GOVD_EXPLICIT;
7894 }
7895 else
7896 {
7897 if ((ctx->region_type & ORT_ACC) != 0)
7898 nflags = oacc_default_clause (ctx, decl, flags);
7899 else
7900 nflags |= GOVD_MAP;
7901 }
7902 }
7903 found_outer:
7904 omp_add_variable (ctx, decl, nflags);
7905 }
7906 else
7907 {
7908 /* If nothing changed, there's nothing left to do. */
7909 if ((n->value & flags) == flags)
7910 return ret;
7911 flags |= n->value;
7912 n->value = flags;
7913 }
7914 goto do_outer;
7915 }
7916
7917 if (n == NULL)
7918 {
7919 if (ctx->region_type == ORT_WORKSHARE
7920 || ctx->region_type == ORT_TASKGROUP
7921 || ctx->region_type == ORT_SIMD
7922 || ctx->region_type == ORT_ACC
7923 || (ctx->region_type & ORT_TARGET_DATA) != 0)
7924 goto do_outer;
7925
7926 flags = omp_default_clause (ctx, decl, in_code, flags);
7927
7928 if ((flags & GOVD_PRIVATE)
7929 && lang_hooks.decls.omp_private_outer_ref (decl))
7930 flags |= GOVD_PRIVATE_OUTER_REF;
7931
7932 omp_add_variable (ctx, decl, flags);
7933
7934 shared = (flags & GOVD_SHARED) != 0;
7935 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7936 goto do_outer;
7937 }
7938
7939 /* Don't mark as GOVD_SEEN addressable temporaries seen only in simd
7940 lb, b or incr expressions, those shouldn't be turned into simd arrays. */
7941 if (ctx->region_type == ORT_SIMD
7942 && ctx->in_for_exprs
7943 && ((n->value & (GOVD_PRIVATE | GOVD_SEEN | GOVD_EXPLICIT))
7944 == GOVD_PRIVATE))
7945 flags &= ~GOVD_SEEN;
7946
7947 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
7948 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
7949 && DECL_SIZE (decl))
7950 {
7951 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7952 {
7953 splay_tree_node n2;
7954 tree t = DECL_VALUE_EXPR (decl);
7955 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
7956 t = TREE_OPERAND (t, 0);
7957 gcc_assert (DECL_P (t));
7958 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7959 n2->value |= GOVD_SEEN;
7960 }
7961 else if (omp_privatize_by_reference (decl)
7962 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
7963 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
7964 != INTEGER_CST))
7965 {
7966 splay_tree_node n2;
7967 tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7968 gcc_assert (DECL_P (t));
7969 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7970 if (n2)
7971 omp_notice_variable (ctx, t, true);
7972 }
7973 }
7974
7975 if (ctx->region_type & ORT_ACC)
7976 /* For OpenACC, as remarked above, defer expansion. */
7977 shared = false;
7978 else
7979 shared = ((flags | n->value) & GOVD_SHARED) != 0;
7980 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7981
7982 /* If nothing changed, there's nothing left to do. */
7983 if ((n->value & flags) == flags)
7984 return ret;
7985 flags |= n->value;
7986 n->value = flags;
7987
7988 do_outer:
7989 /* If the variable is private in the current context, then we don't
7990 need to propagate anything to an outer context. */
7991 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
7992 return ret;
7993 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7994 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7995 return ret;
7996 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
7997 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7998 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7999 return ret;
8000 if (ctx->outer_context
8001 && omp_notice_variable (ctx->outer_context, decl, in_code))
8002 return true;
8003 return ret;
8004 }
8005
8006 /* Verify that DECL is private within CTX. If there's specific information
8007 to the contrary in the innermost scope, generate an error. */
8008
8009 static bool
8010 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
8011 {
8012 splay_tree_node n;
8013
8014 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
8015 if (n != NULL)
8016 {
8017 if (n->value & GOVD_SHARED)
8018 {
8019 if (ctx == gimplify_omp_ctxp)
8020 {
8021 if (simd)
8022 error ("iteration variable %qE is predetermined linear",
8023 DECL_NAME (decl));
8024 else
8025 error ("iteration variable %qE should be private",
8026 DECL_NAME (decl));
8027 n->value = GOVD_PRIVATE;
8028 return true;
8029 }
8030 else
8031 return false;
8032 }
8033 else if ((n->value & GOVD_EXPLICIT) != 0
8034 && (ctx == gimplify_omp_ctxp
8035 || (ctx->region_type == ORT_COMBINED_PARALLEL
8036 && gimplify_omp_ctxp->outer_context == ctx)))
8037 {
8038 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
8039 error ("iteration variable %qE should not be firstprivate",
8040 DECL_NAME (decl));
8041 else if ((n->value & GOVD_REDUCTION) != 0)
8042 error ("iteration variable %qE should not be reduction",
8043 DECL_NAME (decl));
8044 else if (simd != 1 && (n->value & GOVD_LINEAR) != 0)
8045 error ("iteration variable %qE should not be linear",
8046 DECL_NAME (decl));
8047 }
8048 return (ctx == gimplify_omp_ctxp
8049 || (ctx->region_type == ORT_COMBINED_PARALLEL
8050 && gimplify_omp_ctxp->outer_context == ctx));
8051 }
8052
8053 if (ctx->region_type != ORT_WORKSHARE
8054 && ctx->region_type != ORT_TASKGROUP
8055 && ctx->region_type != ORT_SIMD
8056 && ctx->region_type != ORT_ACC)
8057 return false;
8058 else if (ctx->outer_context)
8059 return omp_is_private (ctx->outer_context, decl, simd);
8060 return false;
8061 }
8062
8063 /* Return true if DECL is private within a parallel region
8064 that binds to the current construct's context, or appears in that
8065 parallel region's REDUCTION clause. */
8066
8067 static bool
8068 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
8069 {
8070 splay_tree_node n;
8071
8072 do
8073 {
8074 ctx = ctx->outer_context;
8075 if (ctx == NULL)
8076 {
8077 if (is_global_var (decl))
8078 return false;
8079
8080 /* References might be private, but might be shared too; when
8081 checking for copyprivate, assume they might be private,
8082 otherwise assume they might be shared. */
8083 if (copyprivate)
8084 return true;
8085
8086 if (omp_privatize_by_reference (decl))
8087 return false;
8088
8089 /* Treat C++ privatized non-static data members outside
8090 of the privatization the same. */
8091 if (omp_member_access_dummy_var (decl))
8092 return false;
8093
8094 return true;
8095 }
8096
8097 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8098
8099 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
8100 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
8101 {
8102 if ((ctx->region_type & ORT_TARGET_DATA) != 0
8103 || n == NULL
8104 || (n->value & GOVD_MAP) == 0)
8105 continue;
8106 return false;
8107 }
8108
8109 if (n != NULL)
8110 {
8111 if ((n->value & GOVD_LOCAL) != 0
8112 && omp_member_access_dummy_var (decl))
8113 return false;
8114 return (n->value & GOVD_SHARED) == 0;
8115 }
8116
8117 if (ctx->region_type == ORT_WORKSHARE
8118 || ctx->region_type == ORT_TASKGROUP
8119 || ctx->region_type == ORT_SIMD
8120 || ctx->region_type == ORT_ACC)
8121 continue;
8122
8123 break;
8124 }
8125 while (1);
8126 return false;
8127 }
8128
8129 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
8130
8131 static tree
8132 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
8133 {
8134 tree t = *tp;
8135
8136 /* Stop the walk if this is the DECL_EXPR for the decl we're looking for. */
8137 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
8138 return t;
8139
8140 if (IS_TYPE_OR_DECL_P (t))
8141 *walk_subtrees = 0;
8142 return NULL_TREE;
8143 }
8144
8145
8146 /* Gimplify the affinity clause but effectively ignore it.
8147 Generate:
8148 var = begin;
8149 if ((step > 0) ? var <= end : var > end)
8150 locator_var_expr; */
8151
8152 static void
8153 gimplify_omp_affinity (tree *list_p, gimple_seq *pre_p)
8154 {
8155 tree last_iter = NULL_TREE;
8156 tree last_bind = NULL_TREE;
8157 tree label = NULL_TREE;
8158 tree *last_body = NULL;
8159 for (tree c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
8160 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_AFFINITY)
8161 {
8162 tree t = OMP_CLAUSE_DECL (c);
8163 if (TREE_CODE (t) == TREE_LIST
8164 && TREE_PURPOSE (t)
8165 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8166 {
8167 if (TREE_VALUE (t) == null_pointer_node)
8168 continue;
8169 if (TREE_PURPOSE (t) != last_iter)
8170 {
8171 if (last_bind)
8172 {
8173 append_to_statement_list (label, last_body);
8174 gimplify_and_add (last_bind, pre_p);
8175 last_bind = NULL_TREE;
8176 }
8177 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8178 {
8179 if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
8180 is_gimple_val, fb_rvalue) == GS_ERROR
8181 || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
8182 is_gimple_val, fb_rvalue) == GS_ERROR
8183 || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
8184 is_gimple_val, fb_rvalue) == GS_ERROR
8185 || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
8186 is_gimple_val, fb_rvalue)
8187 == GS_ERROR))
8188 return;
8189 }
8190 last_iter = TREE_PURPOSE (t);
8191 tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
8192 last_bind = build3 (BIND_EXPR, void_type_node, BLOCK_VARS (block),
8193 NULL, block);
8194 last_body = &BIND_EXPR_BODY (last_bind);
8195 tree cond = NULL_TREE;
8196 location_t loc = OMP_CLAUSE_LOCATION (c);
8197 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8198 {
8199 tree var = TREE_VEC_ELT (it, 0);
8200 tree begin = TREE_VEC_ELT (it, 1);
8201 tree end = TREE_VEC_ELT (it, 2);
8202 tree step = TREE_VEC_ELT (it, 3);
8203 loc = DECL_SOURCE_LOCATION (var);
8204 tree tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8205 var, begin);
8206 append_to_statement_list_force (tem, last_body);
8207
8208 tree cond1 = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8209 step, build_zero_cst (TREE_TYPE (step)));
8210 tree cond2 = fold_build2_loc (loc, LE_EXPR, boolean_type_node,
8211 var, end);
8212 tree cond3 = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8213 var, end);
8214 cond1 = fold_build3_loc (loc, COND_EXPR, boolean_type_node,
8215 cond1, cond2, cond3);
8216 if (cond)
8217 cond = fold_build2_loc (loc, TRUTH_AND_EXPR,
8218 boolean_type_node, cond, cond1);
8219 else
8220 cond = cond1;
8221 }
8222 tree cont_label = create_artificial_label (loc);
8223 label = build1 (LABEL_EXPR, void_type_node, cont_label);
8224 tree tem = fold_build3_loc (loc, COND_EXPR, void_type_node, cond,
8225 void_node,
8226 build_and_jump (&cont_label));
8227 append_to_statement_list_force (tem, last_body);
8228 }
8229 if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
8230 {
8231 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t), 0),
8232 last_body);
8233 TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
8234 }
8235 if (error_operand_p (TREE_VALUE (t)))
8236 return;
8237 append_to_statement_list_force (TREE_VALUE (t), last_body);
8238 TREE_VALUE (t) = null_pointer_node;
8239 }
8240 else
8241 {
8242 if (last_bind)
8243 {
8244 append_to_statement_list (label, last_body);
8245 gimplify_and_add (last_bind, pre_p);
8246 last_bind = NULL_TREE;
8247 }
8248 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8249 {
8250 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8251 NULL, is_gimple_val, fb_rvalue);
8252 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8253 }
8254 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8255 return;
8256 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8257 is_gimple_lvalue, fb_lvalue) == GS_ERROR)
8258 return;
8259 gimplify_and_add (OMP_CLAUSE_DECL (c), pre_p);
8260 }
8261 }
8262 if (last_bind)
8263 {
8264 append_to_statement_list (label, last_body);
8265 gimplify_and_add (last_bind, pre_p);
8266 }
8267 return;
8268 }
8269
8270 /* If *LIST_P contains any OpenMP depend clauses with iterators,
8271 lower all the depend clauses by populating the corresponding depend
8272 array. Returns 0 if there are no such depend clauses, or
8273 2 if all depend clauses should be removed, 1 otherwise. */
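/* As set up below, the new-style depend array is laid out as
     { 0, total, n_out, n_mutexinoutset, n_in, addr0, addr1, ... }
   while the old-style one (only in/out/inout kinds present) is
     { total, n_out, addr0, addr1, ... }
   with the addresses grouped by kind: out/inout first, then
   mutexinoutset, in and depobj.  */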
8274
8275 static int
8276 gimplify_omp_depend (tree *list_p, gimple_seq *pre_p)
8277 {
8278 tree c;
8279 gimple *g;
8280 size_t n[4] = { 0, 0, 0, 0 };
8281 bool unused[4];
8282 tree counts[4] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
8283 tree last_iter = NULL_TREE, last_count = NULL_TREE;
8284 size_t i, j;
8285 location_t first_loc = UNKNOWN_LOCATION;
8286
8287 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
8288 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
8289 {
8290 switch (OMP_CLAUSE_DEPEND_KIND (c))
8291 {
8292 case OMP_CLAUSE_DEPEND_IN:
8293 i = 2;
8294 break;
8295 case OMP_CLAUSE_DEPEND_OUT:
8296 case OMP_CLAUSE_DEPEND_INOUT:
8297 i = 0;
8298 break;
8299 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
8300 i = 1;
8301 break;
8302 case OMP_CLAUSE_DEPEND_DEPOBJ:
8303 i = 3;
8304 break;
8305 case OMP_CLAUSE_DEPEND_SOURCE:
8306 case OMP_CLAUSE_DEPEND_SINK:
8307 continue;
8308 default:
8309 gcc_unreachable ();
8310 }
8311 tree t = OMP_CLAUSE_DECL (c);
8312 if (first_loc == UNKNOWN_LOCATION)
8313 first_loc = OMP_CLAUSE_LOCATION (c);
8314 if (TREE_CODE (t) == TREE_LIST
8315 && TREE_PURPOSE (t)
8316 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8317 {
8318 if (TREE_PURPOSE (t) != last_iter)
8319 {
8320 tree tcnt = size_one_node;
8321 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8322 {
8323 if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
8324 is_gimple_val, fb_rvalue) == GS_ERROR
8325 || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
8326 is_gimple_val, fb_rvalue) == GS_ERROR
8327 || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
8328 is_gimple_val, fb_rvalue) == GS_ERROR
8329 || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
8330 is_gimple_val, fb_rvalue)
8331 == GS_ERROR))
8332 return 2;
8333 tree var = TREE_VEC_ELT (it, 0);
8334 tree begin = TREE_VEC_ELT (it, 1);
8335 tree end = TREE_VEC_ELT (it, 2);
8336 tree step = TREE_VEC_ELT (it, 3);
8337 tree orig_step = TREE_VEC_ELT (it, 4);
8338 tree type = TREE_TYPE (var);
8339 tree stype = TREE_TYPE (step);
8340 location_t loc = DECL_SOURCE_LOCATION (var);
8341 tree endmbegin;
8342 /* Compute count for this iterator as
8343 orig_step > 0
8344 ? (begin < end ? (end - begin + (step - 1)) / step : 0)
8345 : (begin > end ? (end - begin + (step + 1)) / step : 0)
8346 and compute product of those for the entire depend
8347 clause. */
8348 if (POINTER_TYPE_P (type))
8349 endmbegin = fold_build2_loc (loc, POINTER_DIFF_EXPR,
8350 stype, end, begin);
8351 else
8352 endmbegin = fold_build2_loc (loc, MINUS_EXPR, type,
8353 end, begin);
8354 tree stepm1 = fold_build2_loc (loc, MINUS_EXPR, stype,
8355 step,
8356 build_int_cst (stype, 1));
8357 tree stepp1 = fold_build2_loc (loc, PLUS_EXPR, stype, step,
8358 build_int_cst (stype, 1));
8359 tree pos = fold_build2_loc (loc, PLUS_EXPR, stype,
8360 unshare_expr (endmbegin),
8361 stepm1);
8362 pos = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
8363 pos, step);
8364 tree neg = fold_build2_loc (loc, PLUS_EXPR, stype,
8365 endmbegin, stepp1);
8366 if (TYPE_UNSIGNED (stype))
8367 {
8368 neg = fold_build1_loc (loc, NEGATE_EXPR, stype, neg);
8369 step = fold_build1_loc (loc, NEGATE_EXPR, stype, step);
8370 }
8371 neg = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
8372 neg, step);
8373 step = NULL_TREE;
8374 tree cond = fold_build2_loc (loc, LT_EXPR,
8375 boolean_type_node,
8376 begin, end);
8377 pos = fold_build3_loc (loc, COND_EXPR, stype, cond, pos,
8378 build_int_cst (stype, 0));
8379 cond = fold_build2_loc (loc, LT_EXPR, boolean_type_node,
8380 end, begin);
8381 neg = fold_build3_loc (loc, COND_EXPR, stype, cond, neg,
8382 build_int_cst (stype, 0));
8383 tree osteptype = TREE_TYPE (orig_step);
8384 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8385 orig_step,
8386 build_int_cst (osteptype, 0));
8387 tree cnt = fold_build3_loc (loc, COND_EXPR, stype,
8388 cond, pos, neg);
8389 cnt = fold_convert_loc (loc, sizetype, cnt);
8390 if (gimplify_expr (&cnt, pre_p, NULL, is_gimple_val,
8391 fb_rvalue) == GS_ERROR)
8392 return 2;
8393 tcnt = size_binop_loc (loc, MULT_EXPR, tcnt, cnt);
8394 }
8395 if (gimplify_expr (&tcnt, pre_p, NULL, is_gimple_val,
8396 fb_rvalue) == GS_ERROR)
8397 return 2;
8398 last_iter = TREE_PURPOSE (t);
8399 last_count = tcnt;
8400 }
8401 if (counts[i] == NULL_TREE)
8402 counts[i] = last_count;
8403 else
8404 counts[i] = size_binop_loc (OMP_CLAUSE_LOCATION (c),
8405 PLUS_EXPR, counts[i], last_count);
8406 }
8407 else
8408 n[i]++;
8409 }
8410 for (i = 0; i < 4; i++)
8411 if (counts[i])
8412 break;
8413 if (i == 4)
8414 return 0;
8415
8416 tree total = size_zero_node;
8417 for (i = 0; i < 4; i++)
8418 {
8419 unused[i] = counts[i] == NULL_TREE && n[i] == 0;
8420 if (counts[i] == NULL_TREE)
8421 counts[i] = size_zero_node;
8422 if (n[i])
8423 counts[i] = size_binop (PLUS_EXPR, counts[i], size_int (n[i]));
8424 if (gimplify_expr (&counts[i], pre_p, NULL, is_gimple_val,
8425 fb_rvalue) == GS_ERROR)
8426 return 2;
8427 total = size_binop (PLUS_EXPR, total, counts[i]);
8428 }
8429
8430 if (gimplify_expr (&total, pre_p, NULL, is_gimple_val, fb_rvalue)
8431 == GS_ERROR)
8432 return 2;
8433 bool is_old = unused[1] && unused[3];
8434 tree totalpx = size_binop (PLUS_EXPR, unshare_expr (total),
8435 size_int (is_old ? 1 : 4));
8436 tree type = build_array_type (ptr_type_node, build_index_type (totalpx));
8437 tree array = create_tmp_var_raw (type);
8438 TREE_ADDRESSABLE (array) = 1;
8439 if (!poly_int_tree_p (totalpx))
8440 {
8441 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (array)))
8442 gimplify_type_sizes (TREE_TYPE (array), pre_p);
8443 if (gimplify_omp_ctxp)
8444 {
8445 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8446 while (ctx
8447 && (ctx->region_type == ORT_WORKSHARE
8448 || ctx->region_type == ORT_TASKGROUP
8449 || ctx->region_type == ORT_SIMD
8450 || ctx->region_type == ORT_ACC))
8451 ctx = ctx->outer_context;
8452 if (ctx)
8453 omp_add_variable (ctx, array, GOVD_LOCAL | GOVD_SEEN);
8454 }
8455 gimplify_vla_decl (array, pre_p);
8456 }
8457 else
8458 gimple_add_tmp_var (array);
8459 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
8460 NULL_TREE);
8461 tree tem;
8462 if (!is_old)
8463 {
8464 tem = build2 (MODIFY_EXPR, void_type_node, r,
8465 build_int_cst (ptr_type_node, 0));
8466 gimplify_and_add (tem, pre_p);
8467 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
8468 NULL_TREE);
8469 }
8470 tem = build2 (MODIFY_EXPR, void_type_node, r,
8471 fold_convert (ptr_type_node, total));
8472 gimplify_and_add (tem, pre_p);
8473 for (i = 1; i < (is_old ? 2 : 4); i++)
8474 {
8475 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (i + !is_old),
8476 NULL_TREE, NULL_TREE);
8477 tem = build2 (MODIFY_EXPR, void_type_node, r, counts[i - 1]);
8478 gimplify_and_add (tem, pre_p);
8479 }
8480
8481 tree cnts[4];
8482 for (j = 4; j; j--)
8483 if (!unused[j - 1])
8484 break;
8485 for (i = 0; i < 4; i++)
8486 {
8487 if (i && (i >= j || unused[i - 1]))
8488 {
8489 cnts[i] = cnts[i - 1];
8490 continue;
8491 }
8492 cnts[i] = create_tmp_var (sizetype);
8493 if (i == 0)
8494 g = gimple_build_assign (cnts[i], size_int (is_old ? 2 : 5));
8495 else
8496 {
8497 tree t;
8498 if (is_old)
8499 t = size_binop (PLUS_EXPR, counts[0], size_int (2));
8500 else
8501 t = size_binop (PLUS_EXPR, cnts[i - 1], counts[i - 1]);
8502 if (gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue)
8503 == GS_ERROR)
8504 return 2;
8505 g = gimple_build_assign (cnts[i], t);
8506 }
8507 gimple_seq_add_stmt (pre_p, g);
8508 }
8509
8510 last_iter = NULL_TREE;
8511 tree last_bind = NULL_TREE;
8512 tree *last_body = NULL;
8513 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
8514 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
8515 {
8516 switch (OMP_CLAUSE_DEPEND_KIND (c))
8517 {
8518 case OMP_CLAUSE_DEPEND_IN:
8519 i = 2;
8520 break;
8521 case OMP_CLAUSE_DEPEND_OUT:
8522 case OMP_CLAUSE_DEPEND_INOUT:
8523 i = 0;
8524 break;
8525 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
8526 i = 1;
8527 break;
8528 case OMP_CLAUSE_DEPEND_DEPOBJ:
8529 i = 3;
8530 break;
8531 case OMP_CLAUSE_DEPEND_SOURCE:
8532 case OMP_CLAUSE_DEPEND_SINK:
8533 continue;
8534 default:
8535 gcc_unreachable ();
8536 }
8537 tree t = OMP_CLAUSE_DECL (c);
8538 if (TREE_CODE (t) == TREE_LIST
8539 && TREE_PURPOSE (t)
8540 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8541 {
8542 if (TREE_PURPOSE (t) != last_iter)
8543 {
8544 if (last_bind)
8545 gimplify_and_add (last_bind, pre_p);
8546 tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
8547 last_bind = build3 (BIND_EXPR, void_type_node,
8548 BLOCK_VARS (block), NULL, block);
8549 TREE_SIDE_EFFECTS (last_bind) = 1;
8550 SET_EXPR_LOCATION (last_bind, OMP_CLAUSE_LOCATION (c));
8551 tree *p = &BIND_EXPR_BODY (last_bind);
8552 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8553 {
8554 tree var = TREE_VEC_ELT (it, 0);
8555 tree begin = TREE_VEC_ELT (it, 1);
8556 tree end = TREE_VEC_ELT (it, 2);
8557 tree step = TREE_VEC_ELT (it, 3);
8558 tree orig_step = TREE_VEC_ELT (it, 4);
8559 tree type = TREE_TYPE (var);
8560 location_t loc = DECL_SOURCE_LOCATION (var);
8561 /* Emit:
8562 var = begin;
8563 goto cond_label;
8564 beg_label:
8565 ...
8566 var = var + step;
8567 cond_label:
8568 if (orig_step > 0) {
8569 if (var < end) goto beg_label;
8570 } else {
8571 if (var > end) goto beg_label;
8572 }
8573 for each iterator, with inner iterators added to
8574 the ... above. */
8575 tree beg_label = create_artificial_label (loc);
8576 tree cond_label = NULL_TREE;
8577 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8578 var, begin);
8579 append_to_statement_list_force (tem, p);
8580 tem = build_and_jump (&cond_label);
8581 append_to_statement_list_force (tem, p);
8582 tem = build1 (LABEL_EXPR, void_type_node, beg_label);
8583 append_to_statement_list (tem, p);
8584 tree bind = build3 (BIND_EXPR, void_type_node, NULL_TREE,
8585 NULL_TREE, NULL_TREE);
8586 TREE_SIDE_EFFECTS (bind) = 1;
8587 SET_EXPR_LOCATION (bind, loc);
8588 append_to_statement_list_force (bind, p);
8589 if (POINTER_TYPE_P (type))
8590 tem = build2_loc (loc, POINTER_PLUS_EXPR, type,
8591 var, fold_convert_loc (loc, sizetype,
8592 step));
8593 else
8594 tem = build2_loc (loc, PLUS_EXPR, type, var, step);
8595 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8596 var, tem);
8597 append_to_statement_list_force (tem, p);
8598 tem = build1 (LABEL_EXPR, void_type_node, cond_label);
8599 append_to_statement_list (tem, p);
8600 tree cond = fold_build2_loc (loc, LT_EXPR,
8601 boolean_type_node,
8602 var, end);
8603 tree pos
8604 = fold_build3_loc (loc, COND_EXPR, void_type_node,
8605 cond, build_and_jump (&beg_label),
8606 void_node);
8607 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8608 var, end);
8609 tree neg
8610 = fold_build3_loc (loc, COND_EXPR, void_type_node,
8611 cond, build_and_jump (&beg_label),
8612 void_node);
8613 tree osteptype = TREE_TYPE (orig_step);
8614 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8615 orig_step,
8616 build_int_cst (osteptype, 0));
8617 tem = fold_build3_loc (loc, COND_EXPR, void_type_node,
8618 cond, pos, neg);
8619 append_to_statement_list_force (tem, p);
8620 p = &BIND_EXPR_BODY (bind);
8621 }
8622 last_body = p;
8623 }
8624 last_iter = TREE_PURPOSE (t);
8625 if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
8626 {
8627 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t),
8628 0), last_body);
8629 TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
8630 }
8631 if (error_operand_p (TREE_VALUE (t)))
8632 return 2;
8633 TREE_VALUE (t) = build_fold_addr_expr (TREE_VALUE (t));
8634 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8635 NULL_TREE, NULL_TREE);
8636 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8637 void_type_node, r, TREE_VALUE (t));
8638 append_to_statement_list_force (tem, last_body);
8639 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8640 void_type_node, cnts[i],
8641 size_binop (PLUS_EXPR, cnts[i], size_int (1)));
8642 append_to_statement_list_force (tem, last_body);
8643 TREE_VALUE (t) = null_pointer_node;
8644 }
8645 else
8646 {
8647 if (last_bind)
8648 {
8649 gimplify_and_add (last_bind, pre_p);
8650 last_bind = NULL_TREE;
8651 }
8652 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8653 {
8654 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8655 NULL, is_gimple_val, fb_rvalue);
8656 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8657 }
8658 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8659 return 2;
8660 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
8661 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8662 is_gimple_val, fb_rvalue) == GS_ERROR)
8663 return 2;
8664 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8665 NULL_TREE, NULL_TREE);
8666 tem = build2 (MODIFY_EXPR, void_type_node, r, OMP_CLAUSE_DECL (c));
8667 gimplify_and_add (tem, pre_p);
8668 g = gimple_build_assign (cnts[i], size_binop (PLUS_EXPR, cnts[i],
8669 size_int (1)));
8670 gimple_seq_add_stmt (pre_p, g);
8671 }
8672 }
8673 if (last_bind)
8674 gimplify_and_add (last_bind, pre_p);
8675 tree cond = boolean_false_node;
8676 if (is_old)
8677 {
8678 if (!unused[0])
8679 cond = build2_loc (first_loc, NE_EXPR, boolean_type_node, cnts[0],
8680 size_binop_loc (first_loc, PLUS_EXPR, counts[0],
8681 size_int (2)));
8682 if (!unused[2])
8683 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
8684 build2_loc (first_loc, NE_EXPR, boolean_type_node,
8685 cnts[2],
8686 size_binop_loc (first_loc, PLUS_EXPR,
8687 totalpx,
8688 size_int (1))));
8689 }
8690 else
8691 {
8692 tree prev = size_int (5);
8693 for (i = 0; i < 4; i++)
8694 {
8695 if (unused[i])
8696 continue;
8697 prev = size_binop_loc (first_loc, PLUS_EXPR, counts[i], prev);
8698 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
8699 build2_loc (first_loc, NE_EXPR, boolean_type_node,
8700 cnts[i], unshare_expr (prev)));
8701 }
8702 }
8703 tem = build3_loc (first_loc, COND_EXPR, void_type_node, cond,
8704 build_call_expr_loc (first_loc,
8705 builtin_decl_explicit (BUILT_IN_TRAP),
8706 0), void_node);
8707 gimplify_and_add (tem, pre_p);
8708 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
8709 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
8710 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
8711 OMP_CLAUSE_CHAIN (c) = *list_p;
8712 *list_p = c;
8713 return 1;
8714 }
8715
8716 /* Insert a GOMP_MAP_ALLOC or GOMP_MAP_RELEASE node following a
8717 GOMP_MAP_STRUCT mapping. C is an always_pointer mapping. STRUCT_NODE is
8718 the struct node to insert the new mapping after (when the struct node is
8719 initially created). PREV_NODE is the first of two or three mappings for a
8720 pointer, and is either:
8721 - the node before C, when a pair of mappings is used, e.g. for a C/C++
8722 array section.
8723 - not the node before C. This is true when we have a reference-to-pointer
8724 type (with a mapping for the reference and for the pointer), or for
8725 Fortran derived-type mappings with a GOMP_MAP_TO_PSET.
8726 If SCP is non-null, the new node is inserted before *SCP.
8727 If SCP is null, the new node is inserted before PREV_NODE.
8728 The return value is:
8729 - PREV_NODE, if SCP is non-null.
8730 - The newly-created ALLOC or RELEASE node, if SCP is null.
8731 - The second newly-created ALLOC or RELEASE node, if we are mapping a
8732 reference to a pointer. */
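/* For example (an illustrative sketch; the decl names are invented), with C
   being the always_pointer node for "s.p" and PREV_NODE the data mapping
   just before it, a chain
     GOMP_MAP_STRUCT (s) -> GOMP_MAP_TOFROM (s.p) -> GOMP_MAP_ALWAYS_POINTER (s.p)
   becomes, after inserting the new node right after the struct node:
     GOMP_MAP_STRUCT (s) -> GOMP_MAP_ALLOC (s.p) -> GOMP_MAP_TOFROM (s.p)
       -> GOMP_MAP_ALWAYS_POINTER (s.p)
   with GOMP_MAP_RELEASE used instead of GOMP_MAP_ALLOC on exit-data
   constructs.  */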
8733
8734 static tree
8735 insert_struct_comp_map (enum tree_code code, tree c, tree struct_node,
8736 tree prev_node, tree *scp)
8737 {
8738 enum gomp_map_kind mkind
8739 = (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA)
8740 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
8741
8742 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
8743 tree cl = scp ? prev_node : c2;
8744 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8745 OMP_CLAUSE_DECL (c2) = unshare_expr (OMP_CLAUSE_DECL (c));
8746 OMP_CLAUSE_CHAIN (c2) = scp ? *scp : prev_node;
8747 if (OMP_CLAUSE_CHAIN (prev_node) != c
8748 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (prev_node)) == OMP_CLAUSE_MAP
8749 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node))
8750 == GOMP_MAP_TO_PSET))
8751 OMP_CLAUSE_SIZE (c2) = OMP_CLAUSE_SIZE (OMP_CLAUSE_CHAIN (prev_node));
8752 else
8753 OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (ptr_type_node);
8754 if (struct_node)
8755 OMP_CLAUSE_CHAIN (struct_node) = c2;
8756
8757 /* We might need to create an additional mapping if we have a reference to a
8758 pointer (in C++). Don't do this if we have something other than a
8759 GOMP_MAP_ALWAYS_POINTER or GOMP_MAP_ATTACH_DETACH though, e.g. a
8759 GOMP_MAP_TO_PSET. */
8760 if (OMP_CLAUSE_CHAIN (prev_node) != c
8761 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (prev_node)) == OMP_CLAUSE_MAP
8762 && ((OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node))
8763 == GOMP_MAP_ALWAYS_POINTER)
8764 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node))
8765 == GOMP_MAP_ATTACH_DETACH)))
8766 {
8767 tree c4 = OMP_CLAUSE_CHAIN (prev_node);
8768 tree c3 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
8769 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8770 OMP_CLAUSE_DECL (c3) = unshare_expr (OMP_CLAUSE_DECL (c4));
8771 OMP_CLAUSE_SIZE (c3) = TYPE_SIZE_UNIT (ptr_type_node);
8772 OMP_CLAUSE_CHAIN (c3) = prev_node;
8773 if (!scp)
8774 OMP_CLAUSE_CHAIN (c2) = c3;
8775 else
8776 cl = c3;
8777 }
8778
8779 if (scp)
8780 *scp = c2;
8781
8782 return cl;
8783 }
8784
8785 /* Strip ARRAY_REFS or an indirect ref off BASE, find the containing object,
8786 and set *BITPOSP to the bit offset and *POFFSETP to the byte offset of
8786 the access.
8787 If BASE_REF is non-NULL and the containing object is a reference, set
8788 *BASE_REF to that reference before dereferencing the object.
8789 If BASE_REF is NULL, check that the containing object is a COMPONENT_REF or
8790 has array type, else return NULL. */
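/* For example (a sketch): given "struct S { int a; int b[10]; } s" and the
   access "s.b[3]", the ARRAY_REF is stripped first, get_inner_reference is
   then applied to "s.b", and the decl "s" is returned with *BITPOSP set to
   the bit offset of member "b" within S.  */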
8791
8792 static tree
8793 extract_base_bit_offset (tree base, tree *base_ref, poly_int64 *bitposp,
8794 poly_offset_int *poffsetp, tree *offsetp)
8795 {
8796 tree offset;
8797 poly_int64 bitsize, bitpos;
8798 machine_mode mode;
8799 int unsignedp, reversep, volatilep = 0;
8800 poly_offset_int poffset;
8801
8802 if (base_ref)
8803 {
8804 *base_ref = NULL_TREE;
8805
8806 while (TREE_CODE (base) == ARRAY_REF)
8807 base = TREE_OPERAND (base, 0);
8808
8809 if (TREE_CODE (base) == INDIRECT_REF)
8810 base = TREE_OPERAND (base, 0);
8811 }
8812 else
8813 {
8814 if (TREE_CODE (base) == ARRAY_REF)
8815 {
8816 while (TREE_CODE (base) == ARRAY_REF)
8817 base = TREE_OPERAND (base, 0);
8818 if (TREE_CODE (base) != COMPONENT_REF
8819 || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE)
8820 return NULL_TREE;
8821 }
8822 else if (TREE_CODE (base) == INDIRECT_REF
8823 && TREE_CODE (TREE_OPERAND (base, 0)) == COMPONENT_REF
8824 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
8825 == REFERENCE_TYPE))
8826 base = TREE_OPERAND (base, 0);
8827 }
8828
8829 base = get_inner_reference (base, &bitsize, &bitpos, &offset, &mode,
8830 &unsignedp, &reversep, &volatilep);
8831
8832 tree orig_base = base;
8833
8834 if ((TREE_CODE (base) == INDIRECT_REF
8835 || (TREE_CODE (base) == MEM_REF
8836 && integer_zerop (TREE_OPERAND (base, 1))))
8837 && DECL_P (TREE_OPERAND (base, 0))
8838 && TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0))) == REFERENCE_TYPE)
8839 base = TREE_OPERAND (base, 0);
8840
8841 if (offset && poly_int_tree_p (offset))
8842 {
8843 poffset = wi::to_poly_offset (offset);
8844 offset = NULL_TREE;
8845 }
8846 else
8847 poffset = 0;
8848
8849 if (maybe_ne (bitpos, 0))
8850 poffset += bits_to_bytes_round_down (bitpos);
8851
8852 *bitposp = bitpos;
8853 *poffsetp = poffset;
8854 *offsetp = offset;
8855
8856 /* Set *BASE_REF if BASE was a dereferenced reference variable. */
8857 if (base_ref && orig_base != base)
8858 *base_ref = orig_base;
8859
8860 return base;
8861 }
8862
8863 /* Returns true if EXPR is or contains (as a sub-component) BASE_PTR. */
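/* E.g. (a sketch): is_or_contains_p ("*p", "MEM[p]") is true via the
   INDIRECT_REF/MEM_REF special case below, and a BASE_PTR of the form
   "p + 4" (a POINTER_PLUS_EXPR) is peeled down to "p" before the final
   comparison.  */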
8864
8865 static bool
8866 is_or_contains_p (tree expr, tree base_ptr)
8867 {
8868 if ((TREE_CODE (expr) == INDIRECT_REF && TREE_CODE (base_ptr) == MEM_REF)
8869 || (TREE_CODE (expr) == MEM_REF && TREE_CODE (base_ptr) == INDIRECT_REF))
8870 return operand_equal_p (TREE_OPERAND (expr, 0),
8871 TREE_OPERAND (base_ptr, 0));
8872 while (!operand_equal_p (expr, base_ptr))
8873 {
8874 if (TREE_CODE (base_ptr) == COMPOUND_EXPR)
8875 base_ptr = TREE_OPERAND (base_ptr, 1);
8876 if (TREE_CODE (base_ptr) == COMPONENT_REF
8877 || TREE_CODE (base_ptr) == POINTER_PLUS_EXPR
8878 || TREE_CODE (base_ptr) == SAVE_EXPR)
8879 base_ptr = TREE_OPERAND (base_ptr, 0);
8880 else
8881 break;
8882 }
8883 return operand_equal_p (expr, base_ptr);
8884 }
8885
8886 /* Implement OpenMP 5.x map ordering rules for target directives. There are
8887 several rules, some of them ambiguous; hopefully we can at least
8888 collect the complexity here in one place. */
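/* E.g. (a sketch): for "#pragma omp target map(alloc: a) map(to: b)" the
   first loop below moves the alloc map (together with any associated
   pointer maps) to the end of the list, yielding "map(to: b)
   map(alloc: a)"; the later loops hoist maps of base pointers in front of
   the maps that dereference them.  */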
8889
8890 static void
8891 omp_target_reorder_clauses (tree *list_p)
8892 {
8893 /* Collect refs to alloc/release/delete maps. */
8894 auto_vec<tree, 32> ard;
8895 tree *cp = list_p;
8896 while (*cp != NULL_TREE)
8897 if (OMP_CLAUSE_CODE (*cp) == OMP_CLAUSE_MAP
8898 && (OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_ALLOC
8899 || OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_RELEASE
8900 || OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_DELETE))
8901 {
8902 /* Unlink cp and push to ard. */
8903 tree c = *cp;
8904 tree nc = OMP_CLAUSE_CHAIN (c);
8905 *cp = nc;
8906 ard.safe_push (c);
8907
8908 /* Any associated pointer type maps should also move along. */
8909 while (*cp != NULL_TREE
8910 && OMP_CLAUSE_CODE (*cp) == OMP_CLAUSE_MAP
8911 && (OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
8912 || OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_FIRSTPRIVATE_POINTER
8913 || OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_ATTACH_DETACH
8914 || OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_POINTER
8915 || OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_ALWAYS_POINTER
8916 || OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_TO_PSET))
8917 {
8918 c = *cp;
8919 nc = OMP_CLAUSE_CHAIN (c);
8920 *cp = nc;
8921 ard.safe_push (c);
8922 }
8923 }
8924 else
8925 cp = &OMP_CLAUSE_CHAIN (*cp);
8926
8927 /* Link alloc/release/delete maps to the end of list. */
8928 for (unsigned int i = 0; i < ard.length (); i++)
8929 {
8930 *cp = ard[i];
8931 cp = &OMP_CLAUSE_CHAIN (ard[i]);
8932 }
8933 *cp = NULL_TREE;
8934
8935 /* OpenMP 5.0 requires that a pointer variable is mapped before
8936 its use as a base pointer. */
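  /* E.g. (a sketch): "map(to: p[0:n]) map(to: p)" must be reordered as
     "map(to: p) map(to: p[0:n])", so that the base pointer "p" is mapped
     before the clause that dereferences it.  */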
8937 auto_vec<tree *, 32> atf;
8938 for (tree *cp = list_p; *cp; cp = &OMP_CLAUSE_CHAIN (*cp))
8939 if (OMP_CLAUSE_CODE (*cp) == OMP_CLAUSE_MAP)
8940 {
8941 /* Collect alloc, to, from, to/from clause tree pointers. */
8942 gomp_map_kind k = OMP_CLAUSE_MAP_KIND (*cp);
8943 if (k == GOMP_MAP_ALLOC
8944 || k == GOMP_MAP_TO
8945 || k == GOMP_MAP_FROM
8946 || k == GOMP_MAP_TOFROM
8947 || k == GOMP_MAP_ALWAYS_TO
8948 || k == GOMP_MAP_ALWAYS_FROM
8949 || k == GOMP_MAP_ALWAYS_TOFROM)
8950 atf.safe_push (cp);
8951 }
8952
8953 for (unsigned int i = 0; i < atf.length (); i++)
8954 if (atf[i])
8955 {
8956 tree *cp = atf[i];
8957 tree decl = OMP_CLAUSE_DECL (*cp);
8958 if (TREE_CODE (decl) == INDIRECT_REF || TREE_CODE (decl) == MEM_REF)
8959 {
8960 tree base_ptr = TREE_OPERAND (decl, 0);
8961 STRIP_TYPE_NOPS (base_ptr);
8962 for (unsigned int j = i + 1; j < atf.length (); j++)
8963 if (atf[j])
8964 {
8965 tree *cp2 = atf[j];
8966 tree decl2 = OMP_CLAUSE_DECL (*cp2);
8969 if (is_or_contains_p (decl2, base_ptr))
8970 {
8971 /* Move *cp2 to before *cp. */
8972 tree c = *cp2;
8973 *cp2 = OMP_CLAUSE_CHAIN (c);
8974 OMP_CLAUSE_CHAIN (c) = *cp;
8975 *cp = c;
8976
8977 if (*cp2 != NULL_TREE
8978 && OMP_CLAUSE_CODE (*cp2) == OMP_CLAUSE_MAP
8979 && OMP_CLAUSE_MAP_KIND (*cp2) == GOMP_MAP_ALWAYS_POINTER)
8980 {
8981 tree c2 = *cp2;
8982 *cp2 = OMP_CLAUSE_CHAIN (c2);
8983 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (c);
8984 OMP_CLAUSE_CHAIN (c) = c2;
8985 }
8986
8987 atf[j] = NULL;
8988 }
8989 }
8990 }
8991 }
8992
8993 /* For attach_detach map clauses, if there is another map that maps the
8994 attached/detached pointer, make sure that map is ordered before the
8995 attach_detach. */
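  /* E.g. (a sketch): an implicit GOMP_MAP_ATTACH_DETACH node created for
     "s.p" from "map(tofrom: s.p[0:n])" must come after the clause mapping
     "s.p" itself, so any such clause found later in the list is moved in
     front of the attach_detach here.  */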
8996 atf.truncate (0);
8997 for (tree *cp = list_p; *cp; cp = &OMP_CLAUSE_CHAIN (*cp))
8998 if (OMP_CLAUSE_CODE (*cp) == OMP_CLAUSE_MAP)
8999 {
9000 /* Collect alloc, to, from, to/from clauses, and
9001 always_pointer/attach_detach clauses. */
9002 gomp_map_kind k = OMP_CLAUSE_MAP_KIND (*cp);
9003 if (k == GOMP_MAP_ALLOC
9004 || k == GOMP_MAP_TO
9005 || k == GOMP_MAP_FROM
9006 || k == GOMP_MAP_TOFROM
9007 || k == GOMP_MAP_ALWAYS_TO
9008 || k == GOMP_MAP_ALWAYS_FROM
9009 || k == GOMP_MAP_ALWAYS_TOFROM
9010 || k == GOMP_MAP_ATTACH_DETACH
9011 || k == GOMP_MAP_ALWAYS_POINTER)
9012 atf.safe_push (cp);
9013 }
9014
9015 for (unsigned int i = 0; i < atf.length (); i++)
9016 if (atf[i])
9017 {
9018 tree *cp = atf[i];
9019 tree ptr = OMP_CLAUSE_DECL (*cp);
9020 STRIP_TYPE_NOPS (ptr);
9021 if (OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_ATTACH_DETACH)
9022 for (unsigned int j = i + 1; j < atf.length (); j++)
9023 {
9024 tree *cp2 = atf[j];
9025 tree decl2 = OMP_CLAUSE_DECL (*cp2);
9026 if (OMP_CLAUSE_MAP_KIND (*cp2) != GOMP_MAP_ATTACH_DETACH
9027 && OMP_CLAUSE_MAP_KIND (*cp2) != GOMP_MAP_ALWAYS_POINTER
9028 && is_or_contains_p (decl2, ptr))
9029 {
9030 /* Move *cp2 to before *cp. */
9031 tree c = *cp2;
9032 *cp2 = OMP_CLAUSE_CHAIN (c);
9033 OMP_CLAUSE_CHAIN (c) = *cp;
9034 *cp = c;
9035 atf[j] = NULL;
9036
9037 /* If decl2 is of the form '*decl2_opnd0', and followed by an
9038 ALWAYS_POINTER or ATTACH_DETACH of 'decl2_opnd0', move the
9039 pointer operation along with *cp2. This can happen for C++
9040 reference sequences. */
9041 if (j + 1 < atf.length ()
9042 && (TREE_CODE (decl2) == INDIRECT_REF
9043 || TREE_CODE (decl2) == MEM_REF))
9044 {
9045 tree *cp3 = atf[j + 1];
9046 tree decl3 = OMP_CLAUSE_DECL (*cp3);
9047 tree decl2_opnd0 = TREE_OPERAND (decl2, 0);
9048 if ((OMP_CLAUSE_MAP_KIND (*cp3) == GOMP_MAP_ALWAYS_POINTER
9049 || OMP_CLAUSE_MAP_KIND (*cp3) == GOMP_MAP_ATTACH_DETACH)
9050 && operand_equal_p (decl3, decl2_opnd0))
9051 {
9052 /* Also move *cp3 to before *cp. */
9053 c = *cp3;
9054 *cp2 = OMP_CLAUSE_CHAIN (c);
9055 OMP_CLAUSE_CHAIN (c) = *cp;
9056 *cp = c;
9057 atf[j + 1] = NULL;
9058 j += 1;
9059 }
9060 }
9061 }
9062 }
9063 }
9064 }
9065
9066 /* DECL is supposed to have lastprivate semantics in the outer contexts
9067 of combined/composite constructs, starting with OCTX.
9068 Add needed lastprivate, shared or map clause if no data sharing or
9069 mapping clause is present. IMPLICIT_P is true if it is an implicit
9070 clause (IV on simd), in which case the lastprivate will not be
9071 copied to some constructs. */
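/* E.g. (a sketch): for "#pragma omp target parallel for lastprivate(x)",
   "x" must be mapped on the target and shared on the parallel so that the
   value assigned in the last iteration is visible after the construct; the
   loop below walks the enclosing contexts and adds whichever clause is
   missing.  */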
9072
9073 static void
9074 omp_lastprivate_for_combined_outer_constructs (struct gimplify_omp_ctx *octx,
9075 tree decl, bool implicit_p)
9076 {
9077 struct gimplify_omp_ctx *orig_octx = octx;
9078 for (; octx; octx = octx->outer_context)
9079 {
9080 if ((octx->region_type == ORT_COMBINED_PARALLEL
9081 || (octx->region_type & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS)
9082 && splay_tree_lookup (octx->variables,
9083 (splay_tree_key) decl) == NULL)
9084 {
9085 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
9086 continue;
9087 }
9088 if ((octx->region_type & ORT_TASK) != 0
9089 && octx->combined_loop
9090 && splay_tree_lookup (octx->variables,
9091 (splay_tree_key) decl) == NULL)
9092 {
9093 omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
9094 continue;
9095 }
9096 if (implicit_p
9097 && octx->region_type == ORT_WORKSHARE
9098 && octx->combined_loop
9099 && splay_tree_lookup (octx->variables,
9100 (splay_tree_key) decl) == NULL
9101 && octx->outer_context
9102 && octx->outer_context->region_type == ORT_COMBINED_PARALLEL
9103 && splay_tree_lookup (octx->outer_context->variables,
9104 (splay_tree_key) decl) == NULL)
9105 {
9106 octx = octx->outer_context;
9107 omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
9108 continue;
9109 }
9110 if ((octx->region_type == ORT_WORKSHARE || octx->region_type == ORT_ACC)
9111 && octx->combined_loop
9112 && splay_tree_lookup (octx->variables,
9113 (splay_tree_key) decl) == NULL
9114 && !omp_check_private (octx, decl, false))
9115 {
9116 omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
9117 continue;
9118 }
9119 if (octx->region_type == ORT_COMBINED_TARGET)
9120 {
9121 splay_tree_node n = splay_tree_lookup (octx->variables,
9122 (splay_tree_key) decl);
9123 if (n == NULL)
9124 {
9125 omp_add_variable (octx, decl, GOVD_MAP | GOVD_SEEN);
9126 octx = octx->outer_context;
9127 }
9128 else if (!implicit_p
9129 && (n->value & GOVD_FIRSTPRIVATE_IMPLICIT))
9130 {
9131 n->value &= ~(GOVD_FIRSTPRIVATE
9132 | GOVD_FIRSTPRIVATE_IMPLICIT
9133 | GOVD_EXPLICIT);
9134 omp_add_variable (octx, decl, GOVD_MAP | GOVD_SEEN);
9135 octx = octx->outer_context;
9136 }
9137 }
9138 break;
9139 }
9140 if (octx && (implicit_p || octx != orig_octx))
9141 omp_notice_variable (octx, decl, true);
9142 }
9143
9144 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
9145 omp context and, where needed, into enclosing omp contexts. */
9146
9147 static void
9148 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
9149 enum omp_region_type region_type,
9150 enum tree_code code)
9151 {
9152 struct gimplify_omp_ctx *ctx, *outer_ctx;
9153 tree c;
9154 hash_map<tree_operand_hash, tree> *struct_map_to_clause = NULL;
9155 hash_map<tree_operand_hash, tree *> *struct_seen_clause = NULL;
9156 hash_set<tree> *struct_deref_set = NULL;
9157 tree *prev_list_p = NULL, *orig_list_p = list_p;
9158 int handled_depend_iterators = -1;
9159 int nowait = -1;
9160
9161 ctx = new_omp_context (region_type);
9162 ctx->code = code;
9163 outer_ctx = ctx->outer_context;
9164 if (code == OMP_TARGET)
9165 {
9166 if (!lang_GNU_Fortran ())
9167 ctx->defaultmap[GDMK_POINTER] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
9168 ctx->defaultmap[GDMK_SCALAR] = GOVD_FIRSTPRIVATE;
9169 ctx->defaultmap[GDMK_SCALAR_TARGET] = (lang_GNU_Fortran ()
9170 ? GOVD_MAP : GOVD_FIRSTPRIVATE);
9171 }
9172 if (!lang_GNU_Fortran ())
9173 switch (code)
9174 {
9175 case OMP_TARGET:
9176 case OMP_TARGET_DATA:
9177 case OMP_TARGET_ENTER_DATA:
9178 case OMP_TARGET_EXIT_DATA:
9179 case OACC_DECLARE:
9180 case OACC_HOST_DATA:
9181 case OACC_PARALLEL:
9182 case OACC_KERNELS:
9183 ctx->target_firstprivatize_array_bases = true;
9184 default:
9185 break;
9186 }
9187
9188 if (code == OMP_TARGET
9189 || code == OMP_TARGET_DATA
9190 || code == OMP_TARGET_ENTER_DATA
9191 || code == OMP_TARGET_EXIT_DATA)
9192 omp_target_reorder_clauses (list_p);
9193
9194 while ((c = *list_p) != NULL)
9195 {
9196 bool remove = false;
9197 bool notice_outer = true;
9198 const char *check_non_private = NULL;
9199 unsigned int flags;
9200 tree decl;
9201
9202 switch (OMP_CLAUSE_CODE (c))
9203 {
9204 case OMP_CLAUSE_PRIVATE:
9205 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
9206 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
9207 {
9208 flags |= GOVD_PRIVATE_OUTER_REF;
9209 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
9210 }
9211 else
9212 notice_outer = false;
9213 goto do_add;
9214 case OMP_CLAUSE_SHARED:
9215 flags = GOVD_SHARED | GOVD_EXPLICIT;
9216 goto do_add;
9217 case OMP_CLAUSE_FIRSTPRIVATE:
9218 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
9219 check_non_private = "firstprivate";
9220 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
9221 {
9222 gcc_assert (code == OMP_TARGET);
9223 flags |= GOVD_FIRSTPRIVATE_IMPLICIT;
9224 }
9225 goto do_add;
9226 case OMP_CLAUSE_LASTPRIVATE:
9227 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
9228 switch (code)
9229 {
9230 case OMP_DISTRIBUTE:
9231 error_at (OMP_CLAUSE_LOCATION (c),
9232 "conditional %<lastprivate%> clause on "
9233 "%qs construct", "distribute");
9234 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
9235 break;
9236 case OMP_TASKLOOP:
9237 error_at (OMP_CLAUSE_LOCATION (c),
9238 "conditional %<lastprivate%> clause on "
9239 "%qs construct", "taskloop");
9240 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
9241 break;
9242 default:
9243 break;
9244 }
9245 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
9246 if (code != OMP_LOOP)
9247 check_non_private = "lastprivate";
9248 decl = OMP_CLAUSE_DECL (c);
9249 if (error_operand_p (decl))
9250 goto do_add;
9251 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
9252 && !lang_hooks.decls.omp_scalar_p (decl, true))
9253 {
9254 error_at (OMP_CLAUSE_LOCATION (c),
9255 "non-scalar variable %qD in conditional "
9256 "%<lastprivate%> clause", decl);
9257 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
9258 }
9259 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
9260 flags |= GOVD_LASTPRIVATE_CONDITIONAL;
9261 omp_lastprivate_for_combined_outer_constructs (outer_ctx, decl,
9262 false);
9263 goto do_add;
9264 case OMP_CLAUSE_REDUCTION:
9265 if (OMP_CLAUSE_REDUCTION_TASK (c))
9266 {
9267 if (region_type == ORT_WORKSHARE || code == OMP_SCOPE)
9268 {
9269 if (nowait == -1)
9270 nowait = omp_find_clause (*list_p,
9271 OMP_CLAUSE_NOWAIT) != NULL_TREE;
9272 if (nowait
9273 && (outer_ctx == NULL
9274 || outer_ctx->region_type != ORT_COMBINED_PARALLEL))
9275 {
9276 error_at (OMP_CLAUSE_LOCATION (c),
9277 "%<task%> reduction modifier on a construct "
9278 "with a %<nowait%> clause");
9279 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
9280 }
9281 }
9282 else if ((region_type & ORT_PARALLEL) != ORT_PARALLEL)
9283 {
9284 error_at (OMP_CLAUSE_LOCATION (c),
9285 "invalid %<task%> reduction modifier on construct "
9286 "other than %<parallel%>, %qs, %<sections%> or "
9287 "%<scope%>", lang_GNU_Fortran () ? "do" : "for");
9288 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
9289 }
9290 }
9291 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
9292 switch (code)
9293 {
9294 case OMP_SECTIONS:
9295 error_at (OMP_CLAUSE_LOCATION (c),
9296 "%<inscan%> %<reduction%> clause on "
9297 "%qs construct", "sections");
9298 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
9299 break;
9300 case OMP_PARALLEL:
9301 error_at (OMP_CLAUSE_LOCATION (c),
9302 "%<inscan%> %<reduction%> clause on "
9303 "%qs construct", "parallel");
9304 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
9305 break;
9306 case OMP_TEAMS:
9307 error_at (OMP_CLAUSE_LOCATION (c),
9308 "%<inscan%> %<reduction%> clause on "
9309 "%qs construct", "teams");
9310 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
9311 break;
9312 case OMP_TASKLOOP:
9313 error_at (OMP_CLAUSE_LOCATION (c),
9314 "%<inscan%> %<reduction%> clause on "
9315 "%qs construct", "taskloop");
9316 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
9317 break;
9318 case OMP_SCOPE:
9319 error_at (OMP_CLAUSE_LOCATION (c),
9320 "%<inscan%> %<reduction%> clause on "
9321 "%qs construct", "scope");
9322 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
9323 break;
9324 default:
9325 break;
9326 }
9327 /* FALLTHRU */
9328 case OMP_CLAUSE_IN_REDUCTION:
9329 case OMP_CLAUSE_TASK_REDUCTION:
9330 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
9331 /* OpenACC permits reductions on private variables. */
9332 if (!(region_type & ORT_ACC)
9333 /* taskgroup is actually not a worksharing region. */
9334 && code != OMP_TASKGROUP)
9335 check_non_private = omp_clause_code_name[OMP_CLAUSE_CODE (c)];
9336 decl = OMP_CLAUSE_DECL (c);
9337 if (TREE_CODE (decl) == MEM_REF)
9338 {
9339 tree type = TREE_TYPE (decl);
9340 bool saved_into_ssa = gimplify_ctxp->into_ssa;
9341 gimplify_ctxp->into_ssa = false;
9342 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
9343 NULL, is_gimple_val, fb_rvalue, false)
9344 == GS_ERROR)
9345 {
9346 gimplify_ctxp->into_ssa = saved_into_ssa;
9347 remove = true;
9348 break;
9349 }
9350 gimplify_ctxp->into_ssa = saved_into_ssa;
9351 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
9352 if (DECL_P (v))
9353 {
9354 omp_firstprivatize_variable (ctx, v);
9355 omp_notice_variable (ctx, v, true);
9356 }
9357 decl = TREE_OPERAND (decl, 0);
9358 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
9359 {
9360 gimplify_ctxp->into_ssa = false;
9361 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
9362 NULL, is_gimple_val, fb_rvalue, false)
9363 == GS_ERROR)
9364 {
9365 gimplify_ctxp->into_ssa = saved_into_ssa;
9366 remove = true;
9367 break;
9368 }
9369 gimplify_ctxp->into_ssa = saved_into_ssa;
9370 v = TREE_OPERAND (decl, 1);
9371 if (DECL_P (v))
9372 {
9373 omp_firstprivatize_variable (ctx, v);
9374 omp_notice_variable (ctx, v, true);
9375 }
9376 decl = TREE_OPERAND (decl, 0);
9377 }
9378 if (TREE_CODE (decl) == ADDR_EXPR
9379 || TREE_CODE (decl) == INDIRECT_REF)
9380 decl = TREE_OPERAND (decl, 0);
9381 }
9382 goto do_add_decl;
9383 case OMP_CLAUSE_LINEAR:
9384 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
9385 is_gimple_val, fb_rvalue) == GS_ERROR)
9386 {
9387 remove = true;
9388 break;
9389 }
9390 else
9391 {
9392 if (code == OMP_SIMD
9393 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
9394 {
9395 struct gimplify_omp_ctx *octx = outer_ctx;
9396 if (octx
9397 && octx->region_type == ORT_WORKSHARE
9398 && octx->combined_loop
9399 && !octx->distribute)
9400 {
9401 if (octx->outer_context
9402 && (octx->outer_context->region_type
9403 == ORT_COMBINED_PARALLEL))
9404 octx = octx->outer_context->outer_context;
9405 else
9406 octx = octx->outer_context;
9407 }
9408 if (octx
9409 && octx->region_type == ORT_WORKSHARE
9410 && octx->combined_loop
9411 && octx->distribute)
9412 {
9413 error_at (OMP_CLAUSE_LOCATION (c),
9414 "%<linear%> clause for variable other than "
9415 "loop iterator specified on construct "
9416 "combined with %<distribute%>");
9417 remove = true;
9418 break;
9419 }
9420 }
9421 /* For combined #pragma omp parallel for simd, need to put
9422 lastprivate and perhaps firstprivate too on the
9423 parallel. Similarly for #pragma omp for simd. */
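/* E.g. (a sketch): for "#pragma omp parallel for simd linear(i)", the loop
   below adds GOVD_LASTPRIVATE (and GOVD_FIRSTPRIVATE, unless copy-in is
   suppressed) for "i" on the enclosing parallel context.  */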
9424 struct gimplify_omp_ctx *octx = outer_ctx;
9425 bool taskloop_seen = false;
9426 decl = NULL_TREE;
9427 do
9428 {
9429 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
9430 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
9431 break;
9432 decl = OMP_CLAUSE_DECL (c);
9433 if (error_operand_p (decl))
9434 {
9435 decl = NULL_TREE;
9436 break;
9437 }
9438 flags = GOVD_SEEN;
9439 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
9440 flags |= GOVD_FIRSTPRIVATE;
9441 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
9442 flags |= GOVD_LASTPRIVATE;
9443 if (octx
9444 && octx->region_type == ORT_WORKSHARE
9445 && octx->combined_loop)
9446 {
9447 if (octx->outer_context
9448 && (octx->outer_context->region_type
9449 == ORT_COMBINED_PARALLEL))
9450 octx = octx->outer_context;
9451 else if (omp_check_private (octx, decl, false))
9452 break;
9453 }
9454 else if (octx
9455 && (octx->region_type & ORT_TASK) != 0
9456 && octx->combined_loop)
9457 taskloop_seen = true;
9458 else if (octx
9459 && octx->region_type == ORT_COMBINED_PARALLEL
9460 && ((ctx->region_type == ORT_WORKSHARE
9461 && octx == outer_ctx)
9462 || taskloop_seen))
9463 flags = GOVD_SEEN | GOVD_SHARED;
9464 else if (octx
9465 && ((octx->region_type & ORT_COMBINED_TEAMS)
9466 == ORT_COMBINED_TEAMS))
9467 flags = GOVD_SEEN | GOVD_SHARED;
9468 else if (octx
9469 && octx->region_type == ORT_COMBINED_TARGET)
9470 {
9471 if (flags & GOVD_LASTPRIVATE)
9472 flags = GOVD_SEEN | GOVD_MAP;
9473 }
9474 else
9475 break;
9476 splay_tree_node on
9477 = splay_tree_lookup (octx->variables,
9478 (splay_tree_key) decl);
9479 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
9480 {
9481 octx = NULL;
9482 break;
9483 }
9484 omp_add_variable (octx, decl, flags);
9485 if (octx->outer_context == NULL)
9486 break;
9487 octx = octx->outer_context;
9488 }
9489 while (1);
9490 if (octx
9491 && decl
9492 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
9493 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
9494 omp_notice_variable (octx, decl, true);
9495 }
9496 flags = GOVD_LINEAR | GOVD_EXPLICIT;
9497 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
9498 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
9499 {
9500 notice_outer = false;
9501 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9502 }
9503 goto do_add;
9504
9505 case OMP_CLAUSE_MAP:
9506 decl = OMP_CLAUSE_DECL (c);
9507 if (error_operand_p (decl))
9508 remove = true;
9509 switch (code)
9510 {
9511 case OMP_TARGET:
9512 break;
9513 case OACC_DATA:
9514 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
9515 break;
9516 /* FALLTHRU */
9517 case OMP_TARGET_DATA:
9518 case OMP_TARGET_ENTER_DATA:
9519 case OMP_TARGET_EXIT_DATA:
9520 case OACC_ENTER_DATA:
9521 case OACC_EXIT_DATA:
9522 case OACC_HOST_DATA:
9523 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
9524 || (OMP_CLAUSE_MAP_KIND (c)
9525 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9526 /* For target {,enter ,exit }data only the array slice is
9527 mapped, but not the pointer to it. */
9528 remove = true;
9529 break;
9530 default:
9531 break;
9532 }
9533 /* For Fortran, not only the pointer to the data is mapped but also
9534 the address of the pointer, the array descriptor etc.; for
9535 'exit data' - and in particular for 'delete:' - having an 'alloc:'
9536 does not make sense. Likewise, for 'update' only transferring the
9537 data itself is needed as the rest has been handled in previous
9538 directives. However, for 'exit data', the array descriptor needs
9539 to be deleted; hence, we turn the MAP_TO_PSET into a MAP_DELETE (or
9539 MAP_RELEASE).
9540
9541 NOTE: Generally, it is not safe to perform "enter data" operations
9542 on arrays where the data *or the descriptor* may go out of scope
9543 before a corresponding "exit data" operation -- and such a
9544 descriptor may be synthesized temporarily, e.g. to pass an
9545 explicit-shape array to a function expecting an assumed-shape
9546 argument. Performing "enter data" inside the called function
9547 would thus be problematic. */
9548 if (code == OMP_TARGET_EXIT_DATA
9549 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TO_PSET)
9550 OMP_CLAUSE_SET_MAP_KIND (c, OMP_CLAUSE_MAP_KIND (*prev_list_p)
9551 == GOMP_MAP_DELETE
9552 ? GOMP_MAP_DELETE : GOMP_MAP_RELEASE);
9553 else if ((code == OMP_TARGET_EXIT_DATA || code == OMP_TARGET_UPDATE)
9554 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
9555 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TO_PSET))
9556 remove = true;
9557
9558 if (remove)
9559 break;
9560 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
9561 {
9562 struct gimplify_omp_ctx *octx;
9563 for (octx = outer_ctx; octx; octx = octx->outer_context)
9564 {
9565 if (octx->region_type != ORT_ACC_HOST_DATA)
9566 break;
9567 splay_tree_node n2
9568 = splay_tree_lookup (octx->variables,
9569 (splay_tree_key) decl);
9570 if (n2)
9571 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
9572 "declared in enclosing %<host_data%> region",
9573 DECL_NAME (decl));
9574 }
9575 }
9576 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
9577 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
9578 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
9579 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
9580 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
9581 {
9582 remove = true;
9583 break;
9584 }
9585 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
9586 || (OMP_CLAUSE_MAP_KIND (c)
9587 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
9588 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
9589 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
9590 {
9591 OMP_CLAUSE_SIZE (c)
9592 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
9593 false);
9594 if ((region_type & ORT_TARGET) != 0)
9595 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
9596 GOVD_FIRSTPRIVATE | GOVD_SEEN);
9597 }
9598
9599 if (TREE_CODE (decl) == TARGET_EXPR)
9600 {
9601 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
9602 is_gimple_lvalue, fb_lvalue)
9603 == GS_ERROR)
9604 remove = true;
9605 }
9606 else if (!DECL_P (decl))
9607 {
9608 tree d = decl, *pd;
9609 if (TREE_CODE (d) == ARRAY_REF)
9610 {
9611 while (TREE_CODE (d) == ARRAY_REF)
9612 d = TREE_OPERAND (d, 0);
9613 if (TREE_CODE (d) == COMPONENT_REF
9614 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
9615 decl = d;
9616 }
9617 pd = &OMP_CLAUSE_DECL (c);
9618 if (d == decl
9619 && TREE_CODE (decl) == INDIRECT_REF
9620 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
9621 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
9622 == REFERENCE_TYPE)
9623 && (OMP_CLAUSE_MAP_KIND (c)
9624 != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION))
9625 {
9626 pd = &TREE_OPERAND (decl, 0);
9627 decl = TREE_OPERAND (decl, 0);
9628 }
9629 bool indir_p = false;
9630 bool component_ref_p = false;
9631 tree indir_base = NULL_TREE;
9632 tree orig_decl = decl;
9633 tree decl_ref = NULL_TREE;
9634 if ((region_type & (ORT_ACC | ORT_TARGET | ORT_TARGET_DATA)) != 0
9635 && TREE_CODE (*pd) == COMPONENT_REF
9636 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH
9637 && code != OACC_UPDATE)
9638 {
9639 while (TREE_CODE (decl) == COMPONENT_REF)
9640 {
9641 decl = TREE_OPERAND (decl, 0);
9642 component_ref_p = true;
9643 if (((TREE_CODE (decl) == MEM_REF
9644 && integer_zerop (TREE_OPERAND (decl, 1)))
9645 || INDIRECT_REF_P (decl))
9646 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
9647 == POINTER_TYPE))
9648 {
9649 indir_p = true;
9650 indir_base = decl;
9651 decl = TREE_OPERAND (decl, 0);
9652 STRIP_NOPS (decl);
9653 }
9654 if (TREE_CODE (decl) == INDIRECT_REF
9655 && DECL_P (TREE_OPERAND (decl, 0))
9656 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
9657 == REFERENCE_TYPE))
9658 {
9659 decl_ref = decl;
9660 decl = TREE_OPERAND (decl, 0);
9661 }
9662 }
9663 }
9664 else if (TREE_CODE (decl) == COMPONENT_REF
9665 && (OMP_CLAUSE_MAP_KIND (c)
9666 != GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION))
9667 {
9668 component_ref_p = true;
9669 while (TREE_CODE (decl) == COMPONENT_REF)
9670 decl = TREE_OPERAND (decl, 0);
9671 if (TREE_CODE (decl) == INDIRECT_REF
9672 && DECL_P (TREE_OPERAND (decl, 0))
9673 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
9674 == REFERENCE_TYPE))
9675 decl = TREE_OPERAND (decl, 0);
9676 }
9677 if (decl != orig_decl && DECL_P (decl) && indir_p
9678 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
9679 || (decl_ref
9680 && TREE_CODE (TREE_TYPE (decl_ref)) == POINTER_TYPE)))
9681 {
9682 gomp_map_kind k
9683 = ((code == OACC_EXIT_DATA || code == OMP_TARGET_EXIT_DATA)
9684 ? GOMP_MAP_DETACH : GOMP_MAP_ATTACH);
9685 /* We have a dereference of a struct member. Make this an
9686 attach/detach operation, and ensure the base pointer is
9687 mapped as a FIRSTPRIVATE_POINTER. */
9688 OMP_CLAUSE_SET_MAP_KIND (c, k);
9689 flags = GOVD_MAP | GOVD_SEEN | GOVD_EXPLICIT;
9690 tree next_clause = OMP_CLAUSE_CHAIN (c);
9691 if (k == GOMP_MAP_ATTACH
9692 && code != OACC_ENTER_DATA
9693 && code != OMP_TARGET_ENTER_DATA
9694 && (!next_clause
9695 || (OMP_CLAUSE_CODE (next_clause) != OMP_CLAUSE_MAP)
9696 || (OMP_CLAUSE_MAP_KIND (next_clause)
9697 != GOMP_MAP_POINTER)
9698 || OMP_CLAUSE_DECL (next_clause) != decl)
9699 && (!struct_deref_set
9700 || !struct_deref_set->contains (decl))
9701 && (!struct_map_to_clause
9702 || !struct_map_to_clause->get (indir_base)))
9703 {
9704 if (!struct_deref_set)
9705 struct_deref_set = new hash_set<tree> ();
9706 /* As well as the attach, we also need a
9707 FIRSTPRIVATE_POINTER clause to properly map the
9708 pointer to the struct base. */
9709 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9710 OMP_CLAUSE_MAP);
9711 OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_ALLOC);
9712 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c2)
9713 = 1;
9714 tree charptr_zero
9715 = build_int_cst (build_pointer_type (char_type_node),
9716 0);
9717 OMP_CLAUSE_DECL (c2)
9718 = build2 (MEM_REF, char_type_node,
9719 decl_ref ? decl_ref : decl, charptr_zero);
9720 OMP_CLAUSE_SIZE (c2) = size_zero_node;
9721 tree c3 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9722 OMP_CLAUSE_MAP);
9723 OMP_CLAUSE_SET_MAP_KIND (c3,
9724 GOMP_MAP_FIRSTPRIVATE_POINTER);
9725 OMP_CLAUSE_DECL (c3) = decl;
9726 OMP_CLAUSE_SIZE (c3) = size_zero_node;
9727 tree mapgrp = *prev_list_p;
9728 *prev_list_p = c2;
9729 OMP_CLAUSE_CHAIN (c3) = mapgrp;
9730 OMP_CLAUSE_CHAIN (c2) = c3;
9731
9732 struct_deref_set->add (decl);
9733 }
9734 goto do_add_decl;
9735 }
9736 /* An "attach/detach" operation on an update directive should
9737 behave as a GOMP_MAP_ALWAYS_POINTER. Beware that
9738 unlike attach or detach map kinds, GOMP_MAP_ALWAYS_POINTER
9739 depends on the previous mapping. */
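  /* E.g. (a sketch): on "#pragma acc update", an attach/detach node created
     for "s.p" is rewritten here so that the pointer value is updated rather
     than (re)attached.  */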
9740 if (code == OACC_UPDATE
9741 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
9742 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ALWAYS_POINTER);
9743 if ((DECL_P (decl)
9744 || (component_ref_p
9745 && (INDIRECT_REF_P (decl)
9746 || TREE_CODE (decl) == MEM_REF
9747 || TREE_CODE (decl) == ARRAY_REF)))
9748 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
9749 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
9750 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH
9751 && code != OACC_UPDATE
9752 && code != OMP_TARGET_UPDATE)
9753 {
9754 if (error_operand_p (decl))
9755 {
9756 remove = true;
9757 break;
9758 }
9759
9760 tree stype = TREE_TYPE (decl);
9761 if (TREE_CODE (stype) == REFERENCE_TYPE)
9762 stype = TREE_TYPE (stype);
9763 if (TYPE_SIZE_UNIT (stype) == NULL
9764 || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
9765 {
9766 error_at (OMP_CLAUSE_LOCATION (c),
9767 "mapping field %qE of variable length "
9768 "structure", OMP_CLAUSE_DECL (c));
9769 remove = true;
9770 break;
9771 }
9772
9773 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER
9774 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
9775 {
9776 /* Error recovery. */
9777 if (prev_list_p == NULL)
9778 {
9779 remove = true;
9780 break;
9781 }
9782
9783 /* The prev_list_p-based error recovery code below is
9784 no longer valid for OpenMP. */
9785 if (code != OMP_TARGET
9786 && code != OMP_TARGET_DATA
9787 && code != OMP_TARGET_UPDATE
9788 && code != OMP_TARGET_ENTER_DATA
9789 && code != OMP_TARGET_EXIT_DATA
9790 && OMP_CLAUSE_CHAIN (*prev_list_p) != c)
9791 {
9792 tree ch = OMP_CLAUSE_CHAIN (*prev_list_p);
9793 if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c)
9794 {
9795 remove = true;
9796 break;
9797 }
9798 }
9799 }
9800
9801 poly_offset_int offset1;
9802 poly_int64 bitpos1;
9803 tree tree_offset1;
9804 tree base_ref;
9805
9806 tree base
9807 = extract_base_bit_offset (OMP_CLAUSE_DECL (c), &base_ref,
9808 &bitpos1, &offset1,
9809 &tree_offset1);
9810
9811 bool do_map_struct = (base == decl && !tree_offset1);
9812
9813 splay_tree_node n
9814 = (DECL_P (decl)
9815 ? splay_tree_lookup (ctx->variables,
9816 (splay_tree_key) decl)
9817 : NULL);
9818 bool ptr = (OMP_CLAUSE_MAP_KIND (c)
9819 == GOMP_MAP_ALWAYS_POINTER);
9820 bool attach_detach = (OMP_CLAUSE_MAP_KIND (c)
9821 == GOMP_MAP_ATTACH_DETACH);
9822 bool attach = OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
9823 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH;
9824 bool has_attachments = false;
9825 /* For OpenACC, pointers in structs should trigger an
9826 attach action. */
9827 if (attach_detach
9828 && ((region_type & (ORT_ACC | ORT_TARGET | ORT_TARGET_DATA))
9829 || code == OMP_TARGET_ENTER_DATA
9830 || code == OMP_TARGET_EXIT_DATA))
9832 {
9833 /* Turn a GOMP_MAP_ATTACH_DETACH clause into a
9834 GOMP_MAP_ATTACH or GOMP_MAP_DETACH clause after we
9835 have detected a case that needs a GOMP_MAP_STRUCT
9836 mapping added. */
9837 gomp_map_kind k
9838 = ((code == OACC_EXIT_DATA || code == OMP_TARGET_EXIT_DATA)
9839 ? GOMP_MAP_DETACH : GOMP_MAP_ATTACH);
9840 OMP_CLAUSE_SET_MAP_KIND (c, k);
9841 has_attachments = true;
9842 }
9843
9844 /* We currently don't handle non-constant offset accesses with respect to
9845 GOMP_MAP_STRUCT elements. */
9846 if (!do_map_struct)
9847 goto skip_map_struct;
9848
9849 /* Nor for attach_detach for OpenMP. */
9850 if ((code == OMP_TARGET
9851 || code == OMP_TARGET_DATA
9852 || code == OMP_TARGET_UPDATE
9853 || code == OMP_TARGET_ENTER_DATA
9854 || code == OMP_TARGET_EXIT_DATA)
9855 && attach_detach)
9856 {
9857 if (DECL_P (decl))
9858 {
9859 if (struct_seen_clause == NULL)
9860 struct_seen_clause
9861 = new hash_map<tree_operand_hash, tree *>;
9862 if (!struct_seen_clause->get (decl))
9863 struct_seen_clause->put (decl, list_p);
9864 }
9865
9866 goto skip_map_struct;
9867 }
9868
9869 if ((DECL_P (decl)
9870 && (n == NULL || (n->value & GOVD_MAP) == 0))
9871 || (!DECL_P (decl)
9872 && (!struct_map_to_clause
9873 || struct_map_to_clause->get (decl) == NULL)))
9874 {
9875 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9876 OMP_CLAUSE_MAP);
9877 gomp_map_kind k = attach ? GOMP_MAP_FORCE_PRESENT
9878 : GOMP_MAP_STRUCT;
9879
9880 OMP_CLAUSE_SET_MAP_KIND (l, k);
9881 if (base_ref)
9882 OMP_CLAUSE_DECL (l) = unshare_expr (base_ref);
9883 else
9884 {
9885 OMP_CLAUSE_DECL (l) = unshare_expr (decl);
9886 if (!DECL_P (OMP_CLAUSE_DECL (l))
9887 && (gimplify_expr (&OMP_CLAUSE_DECL (l),
9888 pre_p, NULL, is_gimple_lvalue,
9889 fb_lvalue)
9890 == GS_ERROR))
9891 {
9892 remove = true;
9893 break;
9894 }
9895 }
9896 OMP_CLAUSE_SIZE (l)
9897 = (!attach
9898 ? size_int (1)
9899 : DECL_P (OMP_CLAUSE_DECL (l))
9900 ? DECL_SIZE_UNIT (OMP_CLAUSE_DECL (l))
9901 : TYPE_SIZE_UNIT (TREE_TYPE (OMP_CLAUSE_DECL (l))));
9902 if (struct_map_to_clause == NULL)
9903 struct_map_to_clause
9904 = new hash_map<tree_operand_hash, tree>;
9905 struct_map_to_clause->put (decl, l);
9906 if (ptr || attach_detach)
9907 {
9908 tree **sc = (struct_seen_clause
9909 ? struct_seen_clause->get (decl)
9910 : NULL);
9911 tree *insert_node_pos = sc ? *sc : prev_list_p;
9912
9913 insert_struct_comp_map (code, c, l, *insert_node_pos,
9914 NULL);
9915 *insert_node_pos = l;
9916 prev_list_p = NULL;
9917 }
9918 else
9919 {
9920 OMP_CLAUSE_CHAIN (l) = c;
9921 *list_p = l;
9922 list_p = &OMP_CLAUSE_CHAIN (l);
9923 }
9924 if (base_ref && code == OMP_TARGET)
9925 {
9926 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9927 OMP_CLAUSE_MAP);
9928 enum gomp_map_kind mkind
9929 = GOMP_MAP_FIRSTPRIVATE_REFERENCE;
9930 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
9931 OMP_CLAUSE_DECL (c2) = decl;
9932 OMP_CLAUSE_SIZE (c2) = size_zero_node;
9933 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
9934 OMP_CLAUSE_CHAIN (l) = c2;
9935 }
9936 flags = GOVD_MAP | GOVD_EXPLICIT;
9937 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c))
9938 || ptr
9939 || attach_detach)
9940 flags |= GOVD_SEEN;
9941 if (has_attachments)
9942 flags |= GOVD_MAP_HAS_ATTACHMENTS;
9943
9944 /* If this is a *pointer-to-struct expression, make sure a
9945 firstprivate map of the base-pointer exists. */
9946 if (component_ref_p
9947 && ((TREE_CODE (decl) == MEM_REF
9948 && integer_zerop (TREE_OPERAND (decl, 1)))
9949 || INDIRECT_REF_P (decl))
9950 && DECL_P (TREE_OPERAND (decl, 0))
9951 && !splay_tree_lookup (ctx->variables,
9952 ((splay_tree_key)
9953 TREE_OPERAND (decl, 0))))
9954 {
9955 decl = TREE_OPERAND (decl, 0);
9956 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9957 OMP_CLAUSE_MAP);
9958 enum gomp_map_kind mkind
9959 = GOMP_MAP_FIRSTPRIVATE_POINTER;
9960 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
9961 OMP_CLAUSE_DECL (c2) = decl;
9962 OMP_CLAUSE_SIZE (c2) = size_zero_node;
9963 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (c);
9964 OMP_CLAUSE_CHAIN (c) = c2;
9965 }
9966
9967 if (DECL_P (decl))
9968 goto do_add_decl;
9969 }
9970 else if (struct_map_to_clause)
9971 {
9972 tree *osc = struct_map_to_clause->get (decl);
9973 tree *sc = NULL, *scp = NULL;
9974 if (n != NULL
9975 && (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c))
9976 || ptr
9977 || attach_detach))
9978 n->value |= GOVD_SEEN;
9979 sc = &OMP_CLAUSE_CHAIN (*osc);
9980 if (*sc != c
9981 && (OMP_CLAUSE_MAP_KIND (*sc)
9982 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9983 sc = &OMP_CLAUSE_CHAIN (*sc);
9984 /* Here "prev_list_p" is the end of the inserted
9985 alloc/release nodes after the struct node, OSC. */
9986 for (; *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc))
9987 if ((ptr || attach_detach) && sc == prev_list_p)
9988 break;
9989 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc))
9990 != COMPONENT_REF
9991 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
9992 != INDIRECT_REF)
9993 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
9994 != ARRAY_REF))
9995 break;
9996 else
9997 {
9998 tree sc_decl = OMP_CLAUSE_DECL (*sc);
9999 poly_offset_int offsetn;
10000 poly_int64 bitposn;
10001 tree tree_offsetn;
10002 tree base
10003 = extract_base_bit_offset (sc_decl, NULL,
10004 &bitposn, &offsetn,
10005 &tree_offsetn);
10006 if (base != decl)
10007 break;
10008 if (scp)
10009 continue;
10010 if ((region_type & ORT_ACC) != 0)
10011 {
10012 /* This duplicate checking code is currently only
10013 enabled for OpenACC. */
10014 tree d1 = OMP_CLAUSE_DECL (*sc);
10015 tree d2 = OMP_CLAUSE_DECL (c);
10016 while (TREE_CODE (d1) == ARRAY_REF)
10017 d1 = TREE_OPERAND (d1, 0);
10018 while (TREE_CODE (d2) == ARRAY_REF)
10019 d2 = TREE_OPERAND (d2, 0);
10020 if (TREE_CODE (d1) == INDIRECT_REF)
10021 d1 = TREE_OPERAND (d1, 0);
10022 if (TREE_CODE (d2) == INDIRECT_REF)
10023 d2 = TREE_OPERAND (d2, 0);
10024 while (TREE_CODE (d1) == COMPONENT_REF)
10025 if (TREE_CODE (d2) == COMPONENT_REF
10026 && TREE_OPERAND (d1, 1)
10027 == TREE_OPERAND (d2, 1))
10028 {
10029 d1 = TREE_OPERAND (d1, 0);
10030 d2 = TREE_OPERAND (d2, 0);
10031 }
10032 else
10033 break;
10034 if (d1 == d2)
10035 {
10036 error_at (OMP_CLAUSE_LOCATION (c),
10037 "%qE appears more than once in map "
10038 "clauses", OMP_CLAUSE_DECL (c));
10039 remove = true;
10040 break;
10041 }
10042 }
10043 if (maybe_lt (offset1, offsetn)
10044 || (known_eq (offset1, offsetn)
10045 && maybe_lt (bitpos1, bitposn)))
10046 {
10047 if (ptr || attach_detach)
10048 scp = sc;
10049 else
10050 break;
10051 }
10052 }
10053 if (remove)
10054 break;
10055 if (!attach)
10056 OMP_CLAUSE_SIZE (*osc)
10057 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc),
10058 size_one_node);
10059 if (ptr || attach_detach)
10060 {
10061 tree cl = insert_struct_comp_map (code, c, NULL,
10062 *prev_list_p, scp);
10063 if (sc == prev_list_p)
10064 {
10065 *sc = cl;
10066 prev_list_p = NULL;
10067 }
10068 else
10069 {
10070 *prev_list_p = OMP_CLAUSE_CHAIN (c);
10071 list_p = prev_list_p;
10072 prev_list_p = NULL;
10073 OMP_CLAUSE_CHAIN (c) = *sc;
10074 *sc = cl;
10075 continue;
10076 }
10077 }
10078 else if (*sc != c)
10079 {
10080 if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue,
10081 fb_lvalue)
10082 == GS_ERROR)
10083 {
10084 remove = true;
10085 break;
10086 }
10087 *list_p = OMP_CLAUSE_CHAIN (c);
10088 OMP_CLAUSE_CHAIN (c) = *sc;
10089 *sc = c;
10090 continue;
10091 }
10092 }
10093 skip_map_struct:
10094 ;
10095 }
10096 else if ((code == OACC_ENTER_DATA
10097 || code == OACC_EXIT_DATA
10098 || code == OACC_DATA
10099 || code == OACC_PARALLEL
10100 || code == OACC_KERNELS
10101 || code == OACC_SERIAL
10102 || code == OMP_TARGET_ENTER_DATA
10103 || code == OMP_TARGET_EXIT_DATA)
10104 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
10105 {
10106 gomp_map_kind k = ((code == OACC_EXIT_DATA
10107 || code == OMP_TARGET_EXIT_DATA)
10108 ? GOMP_MAP_DETACH : GOMP_MAP_ATTACH);
10109 OMP_CLAUSE_SET_MAP_KIND (c, k);
10110 }
10111
10112 if (code == OMP_TARGET && OMP_CLAUSE_MAP_IN_REDUCTION (c))
10113 {
10114 /* Don't gimplify *pd fully at this point, as the base
10115 will need to be adjusted during omp lowering. */
10116 auto_vec<tree, 10> expr_stack;
10117 tree *p = pd;
10118 while (handled_component_p (*p)
10119 || TREE_CODE (*p) == INDIRECT_REF
10120 || TREE_CODE (*p) == ADDR_EXPR
10121 || TREE_CODE (*p) == MEM_REF
10122 || TREE_CODE (*p) == NON_LVALUE_EXPR)
10123 {
10124 expr_stack.safe_push (*p);
10125 p = &TREE_OPERAND (*p, 0);
10126 }
10127 for (int i = expr_stack.length () - 1; i >= 0; i--)
10128 {
10129 tree t = expr_stack[i];
10130 if (TREE_CODE (t) == ARRAY_REF
10131 || TREE_CODE (t) == ARRAY_RANGE_REF)
10132 {
10133 if (TREE_OPERAND (t, 2) == NULL_TREE)
10134 {
10135 tree low = unshare_expr (array_ref_low_bound (t));
10136 if (!is_gimple_min_invariant (low))
10137 {
10138 TREE_OPERAND (t, 2) = low;
10139 if (gimplify_expr (&TREE_OPERAND (t, 2),
10140 pre_p, NULL,
10141 is_gimple_reg,
10142 fb_rvalue) == GS_ERROR)
10143 remove = true;
10144 }
10145 }
10146 else if (gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
10147 NULL, is_gimple_reg,
10148 fb_rvalue) == GS_ERROR)
10149 remove = true;
10150 if (TREE_OPERAND (t, 3) == NULL_TREE)
10151 {
10152 tree elmt_size = array_ref_element_size (t);
10153 if (!is_gimple_min_invariant (elmt_size))
10154 {
10155 elmt_size = unshare_expr (elmt_size);
10156 tree elmt_type
10157 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t,
10158 0)));
10159 tree factor
10160 = size_int (TYPE_ALIGN_UNIT (elmt_type));
10161 elmt_size
10162 = size_binop (EXACT_DIV_EXPR, elmt_size,
10163 factor);
10164 TREE_OPERAND (t, 3) = elmt_size;
10165 if (gimplify_expr (&TREE_OPERAND (t, 3),
10166 pre_p, NULL,
10167 is_gimple_reg,
10168 fb_rvalue) == GS_ERROR)
10169 remove = true;
10170 }
10171 }
10172 else if (gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
10173 NULL, is_gimple_reg,
10174 fb_rvalue) == GS_ERROR)
10175 remove = true;
10176 }
10177 else if (TREE_CODE (t) == COMPONENT_REF)
10178 {
10179 if (TREE_OPERAND (t, 2) == NULL_TREE)
10180 {
10181 tree offset = component_ref_field_offset (t);
10182 if (!is_gimple_min_invariant (offset))
10183 {
10184 offset = unshare_expr (offset);
10185 tree field = TREE_OPERAND (t, 1);
10186 tree factor
10187 = size_int (DECL_OFFSET_ALIGN (field)
10188 / BITS_PER_UNIT);
10189 offset = size_binop (EXACT_DIV_EXPR, offset,
10190 factor);
10191 TREE_OPERAND (t, 2) = offset;
10192 if (gimplify_expr (&TREE_OPERAND (t, 2),
10193 pre_p, NULL,
10194 is_gimple_reg,
10195 fb_rvalue) == GS_ERROR)
10196 remove = true;
10197 }
10198 }
10199 else if (gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
10200 NULL, is_gimple_reg,
10201 fb_rvalue) == GS_ERROR)
10202 remove = true;
10203 }
10204 }
10205 for (; expr_stack.length () > 0; )
10206 {
10207 tree t = expr_stack.pop ();
10208
10209 if (TREE_CODE (t) == ARRAY_REF
10210 || TREE_CODE (t) == ARRAY_RANGE_REF)
10211 {
10212 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1))
10213 && gimplify_expr (&TREE_OPERAND (t, 1), pre_p,
10214 NULL, is_gimple_val,
10215 fb_rvalue) == GS_ERROR)
10216 remove = true;
10217 }
10218 }
10219 }
10220 else if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue,
10221 fb_lvalue) == GS_ERROR)
10222 {
10223 remove = true;
10224 break;
10225 }
10226
10227 /* If this was of the form map(*pointer_to_struct), then the
10228 'pointer_to_struct' DECL should be considered deref'ed. */
10229 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALLOC
10230 || GOMP_MAP_COPY_TO_P (OMP_CLAUSE_MAP_KIND (c))
10231 || GOMP_MAP_COPY_FROM_P (OMP_CLAUSE_MAP_KIND (c)))
10232 && INDIRECT_REF_P (orig_decl)
10233 && DECL_P (TREE_OPERAND (orig_decl, 0))
10234 && TREE_CODE (TREE_TYPE (orig_decl)) == RECORD_TYPE)
10235 {
10236 tree ptr = TREE_OPERAND (orig_decl, 0);
10237 if (!struct_deref_set || !struct_deref_set->contains (ptr))
10238 {
10239 if (!struct_deref_set)
10240 struct_deref_set = new hash_set<tree> ();
10241 struct_deref_set->add (ptr);
10242 }
10243 }
10244
10245 if (!remove
10246 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
10247 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH_DETACH
10248 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
10249 && OMP_CLAUSE_CHAIN (c)
10250 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
10251 && ((OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
10252 == GOMP_MAP_ALWAYS_POINTER)
10253 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
10254 == GOMP_MAP_ATTACH_DETACH)
10255 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
10256 == GOMP_MAP_TO_PSET)))
10257 prev_list_p = list_p;
10258
10259 break;
10260 }
10261 else
10262 {
10263 /* DECL_P (decl) == true */
10264 tree *sc;
10265 if (struct_map_to_clause
10266 && (sc = struct_map_to_clause->get (decl)) != NULL
10267 && OMP_CLAUSE_MAP_KIND (*sc) == GOMP_MAP_STRUCT
10268 && decl == OMP_CLAUSE_DECL (*sc))
10269 {
10270 /* We have found a map of the whole structure after a
10271 leading GOMP_MAP_STRUCT has been created, so refill the
10272 leading clause into a map of the whole structure
10273 variable, and remove the current one.
10274 TODO: we should be able to remove some of the
10275 following structure element maps if they are of a
10276 compatible TO/FROM/ALLOC type.  */
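/* E.g. (illustrative) a leading GOMP_MAP_STRUCT created for map(s.a)
followed later by map(s) collapses into a single map of the whole
variable S.  */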
10277 OMP_CLAUSE_SET_MAP_KIND (*sc, OMP_CLAUSE_MAP_KIND (c));
10278 OMP_CLAUSE_SIZE (*sc) = unshare_expr (OMP_CLAUSE_SIZE (c));
10279 remove = true;
10280 break;
10281 }
10282 }
10283 flags = GOVD_MAP | GOVD_EXPLICIT;
10284 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
10285 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
10286 flags |= GOVD_MAP_ALWAYS_TO;
10287
10288 if ((code == OMP_TARGET
10289 || code == OMP_TARGET_DATA
10290 || code == OMP_TARGET_ENTER_DATA
10291 || code == OMP_TARGET_EXIT_DATA)
10292 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
10293 {
10294 for (struct gimplify_omp_ctx *octx = outer_ctx; octx;
10295 octx = octx->outer_context)
10296 {
10297 splay_tree_node n
10298 = splay_tree_lookup (octx->variables,
10299 (splay_tree_key) OMP_CLAUSE_DECL (c));
10300 /* If this is contained in an outer OpenMP region as a
10301 firstprivate value, remove the attach/detach. */
10302 if (n && (n->value & GOVD_FIRSTPRIVATE))
10303 {
10304 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FIRSTPRIVATE_POINTER);
10305 goto do_add;
10306 }
10307 }
10308
10309 enum gomp_map_kind map_kind = (code == OMP_TARGET_EXIT_DATA
10310 ? GOMP_MAP_DETACH
10311 : GOMP_MAP_ATTACH);
10312 OMP_CLAUSE_SET_MAP_KIND (c, map_kind);
10313 }
10314
10315 goto do_add;
10316
10317 case OMP_CLAUSE_AFFINITY:
10318 gimplify_omp_affinity (list_p, pre_p);
10319 remove = true;
10320 break;
10321 case OMP_CLAUSE_DEPEND:
10322 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
10323 {
10324 tree deps = OMP_CLAUSE_DECL (c);
10325 while (deps && TREE_CODE (deps) == TREE_LIST)
10326 {
10327 if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
10328 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
10329 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
10330 pre_p, NULL, is_gimple_val, fb_rvalue);
10331 deps = TREE_CHAIN (deps);
10332 }
10333 break;
10334 }
10335 else if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
10336 break;
10337 if (handled_depend_iterators == -1)
10338 handled_depend_iterators = gimplify_omp_depend (list_p, pre_p);
10339 if (handled_depend_iterators)
10340 {
10341 if (handled_depend_iterators == 2)
10342 remove = true;
10343 break;
10344 }
10345 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
10346 {
10347 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
10348 NULL, is_gimple_val, fb_rvalue);
10349 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
10350 }
10351 if (error_operand_p (OMP_CLAUSE_DECL (c)))
10352 {
10353 remove = true;
10354 break;
10355 }
10356 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
10357 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
10358 is_gimple_val, fb_rvalue) == GS_ERROR)
10359 {
10360 remove = true;
10361 break;
10362 }
10363 if (code == OMP_TASK)
10364 ctx->has_depend = true;
10365 break;
10366
10367 case OMP_CLAUSE_TO:
10368 case OMP_CLAUSE_FROM:
10369 case OMP_CLAUSE__CACHE_:
10370 decl = OMP_CLAUSE_DECL (c);
10371 if (error_operand_p (decl))
10372 {
10373 remove = true;
10374 break;
10375 }
10376 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
10377 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
10378 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
10379 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
10380 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
10381 {
10382 remove = true;
10383 break;
10384 }
10385 if (!DECL_P (decl))
10386 {
10387 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
10388 NULL, is_gimple_lvalue, fb_lvalue)
10389 == GS_ERROR)
10390 {
10391 remove = true;
10392 break;
10393 }
10394 break;
10395 }
10396 goto do_notice;
10397
10398 case OMP_CLAUSE_USE_DEVICE_PTR:
10399 case OMP_CLAUSE_USE_DEVICE_ADDR:
10400 flags = GOVD_EXPLICIT;
10401 goto do_add;
10402
10403 case OMP_CLAUSE_HAS_DEVICE_ADDR:
10404 decl = OMP_CLAUSE_DECL (c);
10405 while (TREE_CODE (decl) == INDIRECT_REF
10406 || TREE_CODE (decl) == ARRAY_REF)
10407 decl = TREE_OPERAND (decl, 0);
10408 flags = GOVD_EXPLICIT;
10409 goto do_add_decl;
10410
10411 case OMP_CLAUSE_IS_DEVICE_PTR:
10412 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
10413 goto do_add;
10414
10415 do_add:
10416 decl = OMP_CLAUSE_DECL (c);
10417 do_add_decl:
10418 if (error_operand_p (decl))
10419 {
10420 remove = true;
10421 break;
10422 }
10423 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
10424 {
10425 tree t = omp_member_access_dummy_var (decl);
10426 if (t)
10427 {
10428 tree v = DECL_VALUE_EXPR (decl);
10429 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
10430 if (outer_ctx)
10431 omp_notice_variable (outer_ctx, t, true);
10432 }
10433 }
10434 if (code == OACC_DATA
10435 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
10436 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
10437 flags |= GOVD_MAP_0LEN_ARRAY;
10438 omp_add_variable (ctx, decl, flags);
10439 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
10440 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
10441 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
10442 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10443 {
10444 struct gimplify_omp_ctx *pctx
10445 = code == OMP_TARGET ? outer_ctx : ctx;
10446 if (pctx)
10447 omp_add_variable (pctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
10448 GOVD_LOCAL | GOVD_SEEN);
10449 if (pctx
10450 && OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
10451 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
10452 find_decl_expr,
10453 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
10454 NULL) == NULL_TREE)
10455 omp_add_variable (pctx,
10456 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
10457 GOVD_LOCAL | GOVD_SEEN);
10458 gimplify_omp_ctxp = pctx;
10459 push_gimplify_context ();
10460
10461 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
10462 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
10463
10464 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
10465 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
10466 pop_gimplify_context
10467 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
10468 push_gimplify_context ();
10469 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
10470 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
10471 pop_gimplify_context
10472 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
10473 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
10474 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
10475
10476 gimplify_omp_ctxp = outer_ctx;
10477 }
10478 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
10479 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
10480 {
10481 gimplify_omp_ctxp = ctx;
10482 push_gimplify_context ();
10483 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
10484 {
10485 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
10486 NULL, NULL);
10487 TREE_SIDE_EFFECTS (bind) = 1;
10488 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
10489 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
10490 }
10491 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
10492 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
10493 pop_gimplify_context
10494 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
10495 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
10496
10497 gimplify_omp_ctxp = outer_ctx;
10498 }
10499 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10500 && OMP_CLAUSE_LINEAR_STMT (c))
10501 {
10502 gimplify_omp_ctxp = ctx;
10503 push_gimplify_context ();
10504 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
10505 {
10506 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
10507 NULL, NULL);
10508 TREE_SIDE_EFFECTS (bind) = 1;
10509 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
10510 OMP_CLAUSE_LINEAR_STMT (c) = bind;
10511 }
10512 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
10513 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
10514 pop_gimplify_context
10515 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
10516 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
10517
10518 gimplify_omp_ctxp = outer_ctx;
10519 }
10520 if (notice_outer)
10521 goto do_notice;
10522 break;
10523
10524 case OMP_CLAUSE_COPYIN:
10525 case OMP_CLAUSE_COPYPRIVATE:
10526 decl = OMP_CLAUSE_DECL (c);
10527 if (error_operand_p (decl))
10528 {
10529 remove = true;
10530 break;
10531 }
10532 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
10533 && !remove
10534 && !omp_check_private (ctx, decl, true))
10535 {
10536 remove = true;
10537 if (is_global_var (decl))
10538 {
10539 if (DECL_THREAD_LOCAL_P (decl))
10540 remove = false;
10541 else if (DECL_HAS_VALUE_EXPR_P (decl))
10542 {
10543 tree value = get_base_address (DECL_VALUE_EXPR (decl));
10544
10545 if (value
10546 && DECL_P (value)
10547 && DECL_THREAD_LOCAL_P (value))
10548 remove = false;
10549 }
10550 }
10551 if (remove)
10552 error_at (OMP_CLAUSE_LOCATION (c),
10553 "copyprivate variable %qE is not threadprivate"
10554 " or private in outer context", DECL_NAME (decl));
10555 }
10556 do_notice:
10557 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
10558 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
10559 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
10560 && outer_ctx
10561 && ((region_type & ORT_TASKLOOP) == ORT_TASKLOOP
10562 || (region_type == ORT_WORKSHARE
10563 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
10564 && (OMP_CLAUSE_REDUCTION_INSCAN (c)
10565 || code == OMP_LOOP)))
10566 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
10567 || (code == OMP_LOOP
10568 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
10569 && ((outer_ctx->region_type & ORT_COMBINED_TEAMS)
10570 == ORT_COMBINED_TEAMS))))
10571 {
10572 splay_tree_node on
10573 = splay_tree_lookup (outer_ctx->variables,
10574 (splay_tree_key)decl);
10575 if (on == NULL || (on->value & GOVD_DATA_SHARE_CLASS) == 0)
10576 {
10577 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
10578 && TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
10579 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
10580 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
10581 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
10582 == POINTER_TYPE))))
10583 omp_firstprivatize_variable (outer_ctx, decl);
10584 else
10585 {
10586 omp_add_variable (outer_ctx, decl,
10587 GOVD_SEEN | GOVD_SHARED);
10588 if (outer_ctx->outer_context)
10589 omp_notice_variable (outer_ctx->outer_context, decl,
10590 true);
10591 }
10592 }
10593 }
10594 if (outer_ctx)
10595 omp_notice_variable (outer_ctx, decl, true);
10596 if (check_non_private
10597 && (region_type == ORT_WORKSHARE || code == OMP_SCOPE)
10598 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
10599 || decl == OMP_CLAUSE_DECL (c)
10600 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
10601 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
10602 == ADDR_EXPR
10603 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
10604 == POINTER_PLUS_EXPR
10605 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
10606 (OMP_CLAUSE_DECL (c), 0), 0))
10607 == ADDR_EXPR)))))
10608 && omp_check_private (ctx, decl, false))
10609 {
10610 error ("%s variable %qE is private in outer context",
10611 check_non_private, DECL_NAME (decl));
10612 remove = true;
10613 }
10614 break;
10615
10616 case OMP_CLAUSE_DETACH:
10617 flags = GOVD_FIRSTPRIVATE | GOVD_SEEN;
10618 goto do_add;
10619
10620 case OMP_CLAUSE_IF:
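/* Diagnose a directive-name modifier that does not match the
construct, e.g. (illustrative)
#pragma omp parallel if (task: x)
which is rejected below.  */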
10621 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
10622 && OMP_CLAUSE_IF_MODIFIER (c) != code)
10623 {
10624 const char *p[2];
10625 for (int i = 0; i < 2; i++)
10626 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
10627 {
10628 case VOID_CST: p[i] = "cancel"; break;
10629 case OMP_PARALLEL: p[i] = "parallel"; break;
10630 case OMP_SIMD: p[i] = "simd"; break;
10631 case OMP_TASK: p[i] = "task"; break;
10632 case OMP_TASKLOOP: p[i] = "taskloop"; break;
10633 case OMP_TARGET_DATA: p[i] = "target data"; break;
10634 case OMP_TARGET: p[i] = "target"; break;
10635 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
10636 case OMP_TARGET_ENTER_DATA:
10637 p[i] = "target enter data"; break;
10638 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
10639 default: gcc_unreachable ();
10640 }
10641 error_at (OMP_CLAUSE_LOCATION (c),
10642 "expected %qs %<if%> clause modifier rather than %qs",
10643 p[0], p[1]);
10644 remove = true;
10645 }
10646 /* Fall through. */
10647
10648 case OMP_CLAUSE_FINAL:
10649 OMP_CLAUSE_OPERAND (c, 0)
10650 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
10651 /* Fall through. */
10652
10653 case OMP_CLAUSE_NUM_TEAMS:
10654 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS
10655 && OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
10656 && !is_gimple_min_invariant (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)))
10657 {
10658 if (error_operand_p (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)))
10659 {
10660 remove = true;
10661 break;
10662 }
10663 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
10664 = get_initialized_tmp_var (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c),
10665 pre_p, NULL, true);
10666 }
10667 /* Fall through. */
10668
10669 case OMP_CLAUSE_SCHEDULE:
10670 case OMP_CLAUSE_NUM_THREADS:
10671 case OMP_CLAUSE_THREAD_LIMIT:
10672 case OMP_CLAUSE_DIST_SCHEDULE:
10673 case OMP_CLAUSE_DEVICE:
10674 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEVICE
10675 && OMP_CLAUSE_DEVICE_ANCESTOR (c))
10676 {
10677 if (code != OMP_TARGET)
10678 {
10679 error_at (OMP_CLAUSE_LOCATION (c),
10680 "%<device%> clause with %<ancestor%> is only "
10681 "allowed on %<target%> construct");
10682 remove = true;
10683 break;
10684 }
10685
10686 tree clauses = *orig_list_p;
10687 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
10688 if (OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_DEVICE
10689 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_FIRSTPRIVATE
10690 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_PRIVATE
10691 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_DEFAULTMAP
10692 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_MAP
10693 )
10694 {
10695 error_at (OMP_CLAUSE_LOCATION (c),
10696 "with %<ancestor%>, only the %<device%>, "
10697 "%<firstprivate%>, %<private%>, %<defaultmap%>, "
10698 "and %<map%> clauses may appear on the "
10699 "construct");
10700 remove = true;
10701 break;
10702 }
10703 }
10704 /* Fall through. */
10705
10706 case OMP_CLAUSE_PRIORITY:
10707 case OMP_CLAUSE_GRAINSIZE:
10708 case OMP_CLAUSE_NUM_TASKS:
10709 case OMP_CLAUSE_FILTER:
10710 case OMP_CLAUSE_HINT:
10711 case OMP_CLAUSE_ASYNC:
10712 case OMP_CLAUSE_WAIT:
10713 case OMP_CLAUSE_NUM_GANGS:
10714 case OMP_CLAUSE_NUM_WORKERS:
10715 case OMP_CLAUSE_VECTOR_LENGTH:
10716 case OMP_CLAUSE_WORKER:
10717 case OMP_CLAUSE_VECTOR:
10718 if (OMP_CLAUSE_OPERAND (c, 0)
10719 && !is_gimple_min_invariant (OMP_CLAUSE_OPERAND (c, 0)))
10720 {
10721 if (error_operand_p (OMP_CLAUSE_OPERAND (c, 0)))
10722 {
10723 remove = true;
10724 break;
10725 }
10726 /* All these clauses care about the value, not a particular decl,
10727 so try to force it into an SSA_NAME or a fresh temporary.  */
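/* E.g. (illustrative) num_threads (foo ()) evaluates FOO () once into
a fresh temporary that the clause then refers to.  */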
10728 OMP_CLAUSE_OPERAND (c, 0)
10729 = get_initialized_tmp_var (OMP_CLAUSE_OPERAND (c, 0),
10730 pre_p, NULL, true);
10731 }
10732 break;
10733
10734 case OMP_CLAUSE_GANG:
10735 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
10736 is_gimple_val, fb_rvalue) == GS_ERROR)
10737 remove = true;
10738 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
10739 is_gimple_val, fb_rvalue) == GS_ERROR)
10740 remove = true;
10741 break;
10742
10743 case OMP_CLAUSE_NOWAIT:
10744 nowait = 1;
10745 break;
10746
10747 case OMP_CLAUSE_ORDERED:
10748 case OMP_CLAUSE_UNTIED:
10749 case OMP_CLAUSE_COLLAPSE:
10750 case OMP_CLAUSE_TILE:
10751 case OMP_CLAUSE_AUTO:
10752 case OMP_CLAUSE_SEQ:
10753 case OMP_CLAUSE_INDEPENDENT:
10754 case OMP_CLAUSE_MERGEABLE:
10755 case OMP_CLAUSE_PROC_BIND:
10756 case OMP_CLAUSE_SAFELEN:
10757 case OMP_CLAUSE_SIMDLEN:
10758 case OMP_CLAUSE_NOGROUP:
10759 case OMP_CLAUSE_THREADS:
10760 case OMP_CLAUSE_SIMD:
10761 case OMP_CLAUSE_BIND:
10762 case OMP_CLAUSE_IF_PRESENT:
10763 case OMP_CLAUSE_FINALIZE:
10764 break;
10765
10766 case OMP_CLAUSE_ORDER:
10767 ctx->order_concurrent = true;
10768 break;
10769
10770 case OMP_CLAUSE_DEFAULTMAP:
10771 enum gimplify_defaultmap_kind gdmkmin, gdmkmax;
10772 switch (OMP_CLAUSE_DEFAULTMAP_CATEGORY (c))
10773 {
10774 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED:
10775 gdmkmin = GDMK_SCALAR;
10776 gdmkmax = GDMK_POINTER;
10777 break;
10778 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR:
10779 gdmkmin = GDMK_SCALAR;
10780 gdmkmax = GDMK_SCALAR_TARGET;
10781 break;
10782 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE:
10783 gdmkmin = gdmkmax = GDMK_AGGREGATE;
10784 break;
10785 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE:
10786 gdmkmin = gdmkmax = GDMK_ALLOCATABLE;
10787 break;
10788 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER:
10789 gdmkmin = gdmkmax = GDMK_POINTER;
10790 break;
10791 default:
10792 gcc_unreachable ();
10793 }
10794 for (int gdmk = gdmkmin; gdmk <= gdmkmax; gdmk++)
10795 switch (OMP_CLAUSE_DEFAULTMAP_BEHAVIOR (c))
10796 {
10797 case OMP_CLAUSE_DEFAULTMAP_ALLOC:
10798 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_ALLOC_ONLY;
10799 break;
10800 case OMP_CLAUSE_DEFAULTMAP_TO:
10801 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_TO_ONLY;
10802 break;
10803 case OMP_CLAUSE_DEFAULTMAP_FROM:
10804 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_FROM_ONLY;
10805 break;
10806 case OMP_CLAUSE_DEFAULTMAP_TOFROM:
10807 ctx->defaultmap[gdmk] = GOVD_MAP;
10808 break;
10809 case OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE:
10810 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
10811 break;
10812 case OMP_CLAUSE_DEFAULTMAP_NONE:
10813 ctx->defaultmap[gdmk] = 0;
10814 break;
10815 case OMP_CLAUSE_DEFAULTMAP_DEFAULT:
10816 switch (gdmk)
10817 {
10818 case GDMK_SCALAR:
10819 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
10820 break;
10821 case GDMK_SCALAR_TARGET:
10822 ctx->defaultmap[gdmk] = (lang_GNU_Fortran ()
10823 ? GOVD_MAP : GOVD_FIRSTPRIVATE);
10824 break;
10825 case GDMK_AGGREGATE:
10826 case GDMK_ALLOCATABLE:
10827 ctx->defaultmap[gdmk] = GOVD_MAP;
10828 break;
10829 case GDMK_POINTER:
10830 ctx->defaultmap[gdmk] = GOVD_MAP;
10831 if (!lang_GNU_Fortran ())
10832 ctx->defaultmap[gdmk] |= GOVD_MAP_0LEN_ARRAY;
10833 break;
10834 default:
10835 gcc_unreachable ();
10836 }
10837 break;
10838 default:
10839 gcc_unreachable ();
10840 }
10841 break;
10842
10843 case OMP_CLAUSE_ALIGNED:
10844 decl = OMP_CLAUSE_DECL (c);
10845 if (error_operand_p (decl))
10846 {
10847 remove = true;
10848 break;
10849 }
10850 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
10851 is_gimple_val, fb_rvalue) == GS_ERROR)
10852 {
10853 remove = true;
10854 break;
10855 }
10856 if (!is_global_var (decl)
10857 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
10858 omp_add_variable (ctx, decl, GOVD_ALIGNED);
10859 break;
10860
10861 case OMP_CLAUSE_NONTEMPORAL:
10862 decl = OMP_CLAUSE_DECL (c);
10863 if (error_operand_p (decl))
10864 {
10865 remove = true;
10866 break;
10867 }
10868 omp_add_variable (ctx, decl, GOVD_NONTEMPORAL);
10869 break;
10870
10871 case OMP_CLAUSE_ALLOCATE:
10872 decl = OMP_CLAUSE_DECL (c);
10873 if (error_operand_p (decl))
10874 {
10875 remove = true;
10876 break;
10877 }
10878 if (gimplify_expr (&OMP_CLAUSE_ALLOCATE_ALLOCATOR (c), pre_p, NULL,
10879 is_gimple_val, fb_rvalue) == GS_ERROR)
10880 {
10881 remove = true;
10882 break;
10883 }
10884 else if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
10885 || (TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
10886 == INTEGER_CST))
10887 ;
10888 else if (code == OMP_TASKLOOP
10889 || !DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)))
10890 OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
10891 = get_initialized_tmp_var (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
10892 pre_p, NULL, false);
10893 break;
10894
10895 case OMP_CLAUSE_DEFAULT:
10896 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
10897 break;
10898
10899 case OMP_CLAUSE_INCLUSIVE:
10900 case OMP_CLAUSE_EXCLUSIVE:
10901 decl = OMP_CLAUSE_DECL (c);
10902 {
10903 splay_tree_node n = splay_tree_lookup (outer_ctx->variables,
10904 (splay_tree_key) decl);
10905 if (n == NULL || (n->value & GOVD_REDUCTION) == 0)
10906 {
10907 error_at (OMP_CLAUSE_LOCATION (c),
10908 "%qD specified in %qs clause but not in %<inscan%> "
10909 "%<reduction%> clause on the containing construct",
10910 decl, omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
10911 remove = true;
10912 }
10913 else
10914 {
10915 n->value |= GOVD_REDUCTION_INSCAN;
10916 if (outer_ctx->region_type == ORT_SIMD
10917 && outer_ctx->outer_context
10918 && outer_ctx->outer_context->region_type == ORT_WORKSHARE)
10919 {
10920 n = splay_tree_lookup (outer_ctx->outer_context->variables,
10921 (splay_tree_key) decl);
10922 if (n && (n->value & GOVD_REDUCTION) != 0)
10923 n->value |= GOVD_REDUCTION_INSCAN;
10924 }
10925 }
10926 }
10927 break;
10928
10929 case OMP_CLAUSE_NOHOST:
10930 default:
10931 gcc_unreachable ();
10932 }
10933
10934 if (code == OACC_DATA
10935 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
10936 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
10937 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
10938 remove = true;
10939 if (remove)
10940 *list_p = OMP_CLAUSE_CHAIN (c);
10941 else
10942 list_p = &OMP_CLAUSE_CHAIN (c);
10943 }
10944
10945 ctx->clauses = *orig_list_p;
10946 gimplify_omp_ctxp = ctx;
10947 if (struct_seen_clause)
10948 delete struct_seen_clause;
10949 if (struct_map_to_clause)
10950 delete struct_map_to_clause;
10951 if (struct_deref_set)
10952 delete struct_deref_set;
10953 }
10954
10955 /* Return true if DECL is a candidate for the shared-to-firstprivate
10956 optimization.  We only consider non-addressable scalars that are
10957 not too big and are not references.  */
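/* Illustrative examples (not exhaustive): a plain "int x" that is
merely read inside the region qualifies; an address-taken scalar or
an array does not.  */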
10958
10959 static bool
10960 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
10961 {
10962 if (TREE_ADDRESSABLE (decl))
10963 return false;
10964 tree type = TREE_TYPE (decl);
10965 if (!is_gimple_reg_type (type)
10966 || TREE_CODE (type) == REFERENCE_TYPE
10967 || TREE_ADDRESSABLE (type))
10968 return false;
10969 /* Don't optimize too large decls, as each thread/task will have
10970 its own. */
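/* E.g. with 64-bit pointers the limit works out to
4 * 64 / 8 = 32 bytes (illustrative).  */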
10971 HOST_WIDE_INT len = int_size_in_bytes (type);
10972 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
10973 return false;
10974 if (omp_privatize_by_reference (decl))
10975 return false;
10976 return true;
10977 }
10978
10979 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
10980 If DECL satisfies omp_shared_to_firstprivate_optimizable_decl_p,
10981 mark it as GOVD_WRITTEN in outer contexts.  */
10982
10983 static void
10984 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
10985 {
10986 for (; ctx; ctx = ctx->outer_context)
10987 {
10988 splay_tree_node n = splay_tree_lookup (ctx->variables,
10989 (splay_tree_key) decl);
10990 if (n == NULL)
10991 continue;
10992 else if (n->value & GOVD_SHARED)
10993 {
10994 n->value |= GOVD_WRITTEN;
10995 return;
10996 }
10997 else if (n->value & GOVD_DATA_SHARE_CLASS)
10998 return;
10999 }
11000 }
11001
11002 /* Helper callback for walk_gimple_seq to discover possible stores
11003 to omp_shared_to_firstprivate_optimizable_decl_p decls; such decls
11004 are marked GOVD_WRITTEN if they are GOVD_SHARED in some outer
11005 context.  */
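/* E.g. (illustrative) a store to "s.f" or through "*(&x)" is traced
back to the underlying decl S or X before that decl is marked.  */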
11006
11007 static tree
11008 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
11009 {
11010 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
11011
11012 *walk_subtrees = 0;
11013 if (!wi->is_lhs)
11014 return NULL_TREE;
11015
11016 tree op = *tp;
11017 do
11018 {
11019 if (handled_component_p (op))
11020 op = TREE_OPERAND (op, 0);
11021 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
11022 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
11023 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
11024 else
11025 break;
11026 }
11027 while (1);
11028 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
11029 return NULL_TREE;
11030
11031 omp_mark_stores (gimplify_omp_ctxp, op);
11032 return NULL_TREE;
11033 }
11034
11035 /* Helper callback for walk_gimple_seq to discover possible stores
11036 to omp_shared_to_firstprivate_optimizable_decl_p decls; such decls
11037 are marked GOVD_WRITTEN if they are GOVD_SHARED in some outer
11038 context.  */
11039
11040 static tree
11041 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
11042 bool *handled_ops_p,
11043 struct walk_stmt_info *wi)
11044 {
11045 gimple *stmt = gsi_stmt (*gsi_p);
11046 switch (gimple_code (stmt))
11047 {
11048 /* Don't recurse on OpenMP constructs for which
11049 gimplify_adjust_omp_clauses already handled the bodies,
11050 but do handle gimple_omp_for_pre_body.  */
11051 case GIMPLE_OMP_FOR:
11052 *handled_ops_p = true;
11053 if (gimple_omp_for_pre_body (stmt))
11054 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
11055 omp_find_stores_stmt, omp_find_stores_op, wi);
11056 break;
11057 case GIMPLE_OMP_PARALLEL:
11058 case GIMPLE_OMP_TASK:
11059 case GIMPLE_OMP_SECTIONS:
11060 case GIMPLE_OMP_SINGLE:
11061 case GIMPLE_OMP_SCOPE:
11062 case GIMPLE_OMP_TARGET:
11063 case GIMPLE_OMP_TEAMS:
11064 case GIMPLE_OMP_CRITICAL:
11065 *handled_ops_p = true;
11066 break;
11067 default:
11068 break;
11069 }
11070 return NULL_TREE;
11071 }
11072
11073 struct gimplify_adjust_omp_clauses_data
11074 {
11075 tree *list_p;
11076 gimple_seq *pre_p;
11077 };
11078
11079 /* For all variables that were not actually used within the context,
11080 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
11081
11082 static int
11083 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
11084 {
11085 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
11086 gimple_seq *pre_p
11087 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
11088 tree decl = (tree) n->key;
11089 unsigned flags = n->value;
11090 enum omp_clause_code code;
11091 tree clause;
11092 bool private_debug;
11093
11094 if (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
11095 && (flags & GOVD_LASTPRIVATE_CONDITIONAL) != 0)
11096 flags = GOVD_SHARED | GOVD_SEEN | GOVD_WRITTEN;
11097 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
11098 return 0;
11099 if ((flags & GOVD_SEEN) == 0)
11100 return 0;
11101 if ((flags & GOVD_MAP_HAS_ATTACHMENTS) != 0)
11102 return 0;
11103 if (flags & GOVD_DEBUG_PRIVATE)
11104 {
11105 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
11106 private_debug = true;
11107 }
11108 else if (flags & GOVD_MAP)
11109 private_debug = false;
11110 else
11111 private_debug
11112 = lang_hooks.decls.omp_private_debug_clause (decl,
11113 !!(flags & GOVD_SHARED));
11114 if (private_debug)
11115 code = OMP_CLAUSE_PRIVATE;
11116 else if (flags & GOVD_MAP)
11117 {
11118 code = OMP_CLAUSE_MAP;
11119 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
11120 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
11121 {
11122 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
11123 return 0;
11124 }
11125 if (VAR_P (decl)
11126 && DECL_IN_CONSTANT_POOL (decl)
11127 && !lookup_attribute ("omp declare target",
11128 DECL_ATTRIBUTES (decl)))
11129 {
11130 tree id = get_identifier ("omp declare target");
11131 DECL_ATTRIBUTES (decl)
11132 = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl));
11133 varpool_node *node = varpool_node::get (decl);
11134 if (node)
11135 {
11136 node->offloadable = 1;
11137 if (ENABLE_OFFLOADING)
11138 g->have_offload = true;
11139 }
11140 }
11141 }
11142 else if (flags & GOVD_SHARED)
11143 {
11144 if (is_global_var (decl))
11145 {
11146 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
11147 while (ctx != NULL)
11148 {
11149 splay_tree_node on
11150 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
11151 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
11152 | GOVD_PRIVATE | GOVD_REDUCTION
11153 | GOVD_LINEAR | GOVD_MAP)) != 0)
11154 break;
11155 ctx = ctx->outer_context;
11156 }
11157 if (ctx == NULL)
11158 return 0;
11159 }
11160 code = OMP_CLAUSE_SHARED;
11161 /* Don't optimize shared into firstprivate for read-only vars
11162 on tasks with a depend clause; we shouldn't try to copy them
11163 until the dependencies are satisfied.  */
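/* E.g. (illustrative)
#pragma omp task shared(x) depend(in: d)
must not copy X early even if X is only read inside the task.  */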
11164 if (gimplify_omp_ctxp->has_depend)
11165 flags |= GOVD_WRITTEN;
11166 }
11167 else if (flags & GOVD_PRIVATE)
11168 code = OMP_CLAUSE_PRIVATE;
11169 else if (flags & GOVD_FIRSTPRIVATE)
11170 {
11171 code = OMP_CLAUSE_FIRSTPRIVATE;
11172 if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
11173 && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
11174 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
11175 {
11176 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
11177 "%<target%> construct", decl);
11178 return 0;
11179 }
11180 }
11181 else if (flags & GOVD_LASTPRIVATE)
11182 code = OMP_CLAUSE_LASTPRIVATE;
11183 else if (flags & (GOVD_ALIGNED | GOVD_NONTEMPORAL))
11184 return 0;
11185 else if (flags & GOVD_CONDTEMP)
11186 {
11187 code = OMP_CLAUSE__CONDTEMP_;
11188 gimple_add_tmp_var (decl);
11189 }
11190 else
11191 gcc_unreachable ();
11192
11193 if (((flags & GOVD_LASTPRIVATE)
11194 || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
11195 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
11196 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
11197
11198 tree chain = *list_p;
11199 clause = build_omp_clause (input_location, code);
11200 OMP_CLAUSE_DECL (clause) = decl;
11201 OMP_CLAUSE_CHAIN (clause) = chain;
11202 if (private_debug)
11203 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
11204 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
11205 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
11206 else if (code == OMP_CLAUSE_SHARED
11207 && (flags & GOVD_WRITTEN) == 0
11208 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
11209 OMP_CLAUSE_SHARED_READONLY (clause) = 1;
11210 else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
11211 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
11212 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
11213 {
11214 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
11215 OMP_CLAUSE_DECL (nc) = decl;
11216 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
11217 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
11218 OMP_CLAUSE_DECL (clause)
11219 = build_simple_mem_ref_loc (input_location, decl);
11220 OMP_CLAUSE_DECL (clause)
11221 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
11222 build_int_cst (build_pointer_type (char_type_node), 0));
11223 OMP_CLAUSE_SIZE (clause) = size_zero_node;
11224 OMP_CLAUSE_SIZE (nc) = size_zero_node;
11225 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
11226 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
11227 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
11228 OMP_CLAUSE_CHAIN (nc) = chain;
11229 OMP_CLAUSE_CHAIN (clause) = nc;
11230 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
11231 gimplify_omp_ctxp = ctx->outer_context;
11232 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
11233 pre_p, NULL, is_gimple_val, fb_rvalue);
11234 gimplify_omp_ctxp = ctx;
11235 }
11236 else if (code == OMP_CLAUSE_MAP)
11237 {
11238 int kind;
11239 /* Not all combinations of these GOVD_MAP flags are actually valid. */
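/* E.g. GOVD_MAP_ALLOC_ONLY combined with GOVD_MAP_TO_ONLY has no
GOMP_MAP_* counterpart and hits gcc_unreachable below (illustrative).  */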
11240 switch (flags & (GOVD_MAP_TO_ONLY
11241 | GOVD_MAP_FORCE
11242 | GOVD_MAP_FORCE_PRESENT
11243 | GOVD_MAP_ALLOC_ONLY
11244 | GOVD_MAP_FROM_ONLY))
11245 {
11246 case 0:
11247 kind = GOMP_MAP_TOFROM;
11248 break;
11249 case GOVD_MAP_FORCE:
11250 kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
11251 break;
11252 case GOVD_MAP_TO_ONLY:
11253 kind = GOMP_MAP_TO;
11254 break;
11255 case GOVD_MAP_FROM_ONLY:
11256 kind = GOMP_MAP_FROM;
11257 break;
11258 case GOVD_MAP_ALLOC_ONLY:
11259 kind = GOMP_MAP_ALLOC;
11260 break;
11261 case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
11262 kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
11263 break;
11264 case GOVD_MAP_FORCE_PRESENT:
11265 kind = GOMP_MAP_FORCE_PRESENT;
11266 break;
11267 default:
11268 gcc_unreachable ();
11269 }
11270 OMP_CLAUSE_SET_MAP_KIND (clause, kind);
11271 /* Setting of the implicit flag for the runtime is currently disabled for
11272 OpenACC. */
11273 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0)
11274 OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (clause) = 1;
11275 if (DECL_SIZE (decl)
11276 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
11277 {
11278 tree decl2 = DECL_VALUE_EXPR (decl);
11279 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
11280 decl2 = TREE_OPERAND (decl2, 0);
11281 gcc_assert (DECL_P (decl2));
11282 tree mem = build_simple_mem_ref (decl2);
11283 OMP_CLAUSE_DECL (clause) = mem;
11284 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
11285 if (gimplify_omp_ctxp->outer_context)
11286 {
11287 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
11288 omp_notice_variable (ctx, decl2, true);
11289 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
11290 }
11291 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
11292 OMP_CLAUSE_MAP);
11293 OMP_CLAUSE_DECL (nc) = decl;
11294 OMP_CLAUSE_SIZE (nc) = size_zero_node;
11295 if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
11296 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
11297 else
11298 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
11299 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
11300 OMP_CLAUSE_CHAIN (clause) = nc;
11301 }
11302 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
11303 && omp_privatize_by_reference (decl))
11304 {
11305 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
11306 OMP_CLAUSE_SIZE (clause)
11307 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
11308 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
11309 gimplify_omp_ctxp = ctx->outer_context;
11310 gimplify_expr (&OMP_CLAUSE_SIZE (clause),
11311 pre_p, NULL, is_gimple_val, fb_rvalue);
11312 gimplify_omp_ctxp = ctx;
11313 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
11314 OMP_CLAUSE_MAP);
11315 OMP_CLAUSE_DECL (nc) = decl;
11316 OMP_CLAUSE_SIZE (nc) = size_zero_node;
11317 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
11318 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
11319 OMP_CLAUSE_CHAIN (clause) = nc;
11320 }
11321 else
11322 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
11323 }
11324 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
11325 {
11326 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
11327 OMP_CLAUSE_DECL (nc) = decl;
11328 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
11329 OMP_CLAUSE_CHAIN (nc) = chain;
11330 OMP_CLAUSE_CHAIN (clause) = nc;
11331 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
11332 gimplify_omp_ctxp = ctx->outer_context;
11333 lang_hooks.decls.omp_finish_clause (nc, pre_p,
11334 (ctx->region_type & ORT_ACC) != 0);
11335 gimplify_omp_ctxp = ctx;
11336 }
11337 *list_p = clause;
11338 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
11339 gimplify_omp_ctxp = ctx->outer_context;
11340 /* Don't call omp_finish_clause on implicitly added OMP_CLAUSE_PRIVATE
11341 in simd.  Those are only added for the local vars inside the simd body,
11342 and they don't need to be e.g. default constructible.  */
11343 if (code != OMP_CLAUSE_PRIVATE || ctx->region_type != ORT_SIMD)
11344 lang_hooks.decls.omp_finish_clause (clause, pre_p,
11345 (ctx->region_type & ORT_ACC) != 0);
11346 if (gimplify_omp_ctxp)
11347 for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
11348 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
11349 && DECL_P (OMP_CLAUSE_SIZE (clause)))
11350 omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
11351 true);
11352 gimplify_omp_ctxp = ctx;
11353 return 0;
11354 }
11355
11356 static void
11357 gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
11358 enum tree_code code)
11359 {
11360 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
11361 tree *orig_list_p = list_p;
11362 tree c, decl;
11363 bool has_inscan_reductions = false;
11364
11365 if (body)
11366 {
11367 struct gimplify_omp_ctx *octx;
11368 for (octx = ctx; octx; octx = octx->outer_context)
11369 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
11370 break;
11371 if (octx)
11372 {
11373 struct walk_stmt_info wi;
11374 memset (&wi, 0, sizeof (wi));
11375 walk_gimple_seq (body, omp_find_stores_stmt,
11376 omp_find_stores_op, &wi);
11377 }
11378 }
11379
11380 if (ctx->add_safelen1)
11381 {
11382 /* If there are VLAs in the body of a simd loop, prevent
11383 vectorization.  */
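/* E.g. (illustrative)
#pragma omp simd
for (i = 0; i < n; i++) { char buf[n]; ... }
is treated as if safelen(1) had been specified.  */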
11384 gcc_assert (ctx->region_type == ORT_SIMD);
11385 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
11386 OMP_CLAUSE_SAFELEN_EXPR (c) = integer_one_node;
11387 OMP_CLAUSE_CHAIN (c) = *list_p;
11388 *list_p = c;
11389 list_p = &OMP_CLAUSE_CHAIN (c);
11390 }
11391
11392 if (ctx->region_type == ORT_WORKSHARE
11393 && ctx->outer_context
11394 && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL)
11395 {
11396 for (c = ctx->outer_context->clauses; c; c = OMP_CLAUSE_CHAIN (c))
11397 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
11398 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
11399 {
11400 decl = OMP_CLAUSE_DECL (c);
11401 splay_tree_node n
11402 = splay_tree_lookup (ctx->outer_context->variables,
11403 (splay_tree_key) decl);
11404 gcc_checking_assert (!splay_tree_lookup (ctx->variables,
11405 (splay_tree_key) decl));
11406 omp_add_variable (ctx, decl, n->value);
11407 tree c2 = copy_node (c);
11408 OMP_CLAUSE_CHAIN (c2) = *list_p;
11409 *list_p = c2;
11410 if ((n->value & GOVD_FIRSTPRIVATE) == 0)
11411 continue;
11412 c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
11413 OMP_CLAUSE_FIRSTPRIVATE);
11414 OMP_CLAUSE_DECL (c2) = decl;
11415 OMP_CLAUSE_CHAIN (c2) = *list_p;
11416 *list_p = c2;
11417 }
11418 }
11419 while ((c = *list_p) != NULL)
11420 {
11421 splay_tree_node n;
11422 bool remove = false;
11423
11424 switch (OMP_CLAUSE_CODE (c))
11425 {
11426 case OMP_CLAUSE_FIRSTPRIVATE:
11427 if ((ctx->region_type & ORT_TARGET)
11428 && (ctx->region_type & ORT_ACC) == 0
11429 && TYPE_ATOMIC (strip_array_types
11430 (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
11431 {
11432 error_at (OMP_CLAUSE_LOCATION (c),
11433 "%<_Atomic%> %qD in %<firstprivate%> clause on "
11434 "%<target%> construct", OMP_CLAUSE_DECL (c));
11435 remove = true;
11436 break;
11437 }
11438 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
11439 {
11440 decl = OMP_CLAUSE_DECL (c);
11441 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
11442 if ((n->value & GOVD_MAP) != 0)
11443 {
11444 remove = true;
11445 break;
11446 }
11447 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT_TARGET (c) = 0;
11448 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c) = 0;
11449 }
11450 /* FALLTHRU */
11451 case OMP_CLAUSE_PRIVATE:
11452 case OMP_CLAUSE_SHARED:
11453 case OMP_CLAUSE_LINEAR:
11454 decl = OMP_CLAUSE_DECL (c);
11455 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
11456 remove = !(n->value & GOVD_SEEN);
11457 if ((n->value & GOVD_LASTPRIVATE_CONDITIONAL) != 0
11458 && code == OMP_PARALLEL
11459 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11460 remove = true;
11461 if (! remove)
11462 {
11463 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
11464 if ((n->value & GOVD_DEBUG_PRIVATE)
11465 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
11466 {
11467 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
11468 || ((n->value & GOVD_DATA_SHARE_CLASS)
11469 == GOVD_SHARED));
11470 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
11471 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
11472 }
11473 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
11474 && ctx->has_depend
11475 && DECL_P (decl))
11476 n->value |= GOVD_WRITTEN;
11477 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
11478 && (n->value & GOVD_WRITTEN) == 0
11479 && DECL_P (decl)
11480 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
11481 OMP_CLAUSE_SHARED_READONLY (c) = 1;
11482 else if (DECL_P (decl)
11483 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
11484 && (n->value & GOVD_WRITTEN) != 0)
11485 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11486 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
11487 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
11488 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
11489 }
11490 else
11491 n->value &= ~GOVD_EXPLICIT;
11492 break;
11493
11494 case OMP_CLAUSE_LASTPRIVATE:
11495 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
11496 accurately reflect the presence of a FIRSTPRIVATE clause. */
11497 decl = OMP_CLAUSE_DECL (c);
11498 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
11499 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
11500 = (n->value & GOVD_FIRSTPRIVATE) != 0;
11501 if (code == OMP_DISTRIBUTE
11502 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
11503 {
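/* E.g. (illustrative)
#pragma omp distribute firstprivate(x) lastprivate(x)
is invalid.  */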
11504 remove = true;
11505 error_at (OMP_CLAUSE_LOCATION (c),
11506 "same variable used in %<firstprivate%> and "
11507 "%<lastprivate%> clauses on %<distribute%> "
11508 "construct");
11509 }
11510 if (!remove
11511 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
11512 && DECL_P (decl)
11513 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
11514 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
11515 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) && code == OMP_PARALLEL)
11516 remove = true;
11517 break;
11518
11519 case OMP_CLAUSE_ALIGNED:
11520 decl = OMP_CLAUSE_DECL (c);
11521 if (!is_global_var (decl))
11522 {
11523 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
11524 remove = n == NULL || !(n->value & GOVD_SEEN);
11525 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
11526 {
11527 struct gimplify_omp_ctx *octx;
11528 if (n != NULL
11529 && (n->value & (GOVD_DATA_SHARE_CLASS
11530 & ~GOVD_FIRSTPRIVATE)))
11531 remove = true;
11532 else
11533 for (octx = ctx->outer_context; octx;
11534 octx = octx->outer_context)
11535 {
11536 n = splay_tree_lookup (octx->variables,
11537 (splay_tree_key) decl);
11538 if (n == NULL)
11539 continue;
11540 if (n->value & GOVD_LOCAL)
11541 break;
11542 /* We have to avoid assigning a shared variable
11543 to itself when trying to add
11544 __builtin_assume_aligned. */
11545 if (n->value & GOVD_SHARED)
11546 {
11547 remove = true;
11548 break;
11549 }
11550 }
11551 }
11552 }
11553 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
11554 {
11555 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
11556 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
11557 remove = true;
11558 }
11559 break;
11560
11561 case OMP_CLAUSE_HAS_DEVICE_ADDR:
11562 decl = OMP_CLAUSE_DECL (c);
11563 while (TREE_CODE (decl) == INDIRECT_REF
11564 || TREE_CODE (decl) == ARRAY_REF)
11565 decl = TREE_OPERAND (decl, 0);
11566 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
11567 remove = n == NULL || !(n->value & GOVD_SEEN);
11568 break;
11569
11570 case OMP_CLAUSE_IS_DEVICE_PTR:
11571 case OMP_CLAUSE_NONTEMPORAL:
11572 decl = OMP_CLAUSE_DECL (c);
11573 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
11574 remove = n == NULL || !(n->value & GOVD_SEEN);
11575 break;
11576
11577 case OMP_CLAUSE_MAP:
11578 if (code == OMP_TARGET_EXIT_DATA
11579 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
11580 {
11581 remove = true;
11582 break;
11583 }
11584 decl = OMP_CLAUSE_DECL (c);
11585 /* Data clauses associated with reductions must be
11586 compatible with present_or_copy. Warn and adjust the clause
11587 if that is not the case. */
11588 if (ctx->region_type == ORT_ACC_PARALLEL
11589 || ctx->region_type == ORT_ACC_SERIAL)
11590 {
11591 tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
11592 n = NULL;
11593
11594 if (DECL_P (t))
11595 n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
11596
11597 if (n && (n->value & GOVD_REDUCTION))
11598 {
11599 enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
11600
11601 OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
11602 if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
11603 && kind != GOMP_MAP_FORCE_PRESENT
11604 && kind != GOMP_MAP_POINTER)
11605 {
11606 warning_at (OMP_CLAUSE_LOCATION (c), 0,
11607 "incompatible data clause with reduction "
11608 "on %qE; promoting to %<present_or_copy%>",
11609 DECL_NAME (t));
11610 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
11611 }
11612 }
11613 }
11614 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
11615 && (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA))
11616 {
11617 remove = true;
11618 break;
11619 }
11620 if (!DECL_P (decl))
11621 {
11622 if ((ctx->region_type & ORT_TARGET) != 0
11623 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
11624 {
11625 if (TREE_CODE (decl) == INDIRECT_REF
11626 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
11627 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
11628 == REFERENCE_TYPE))
11629 decl = TREE_OPERAND (decl, 0);
11630 if (TREE_CODE (decl) == COMPONENT_REF)
11631 {
11632 while (TREE_CODE (decl) == COMPONENT_REF)
11633 decl = TREE_OPERAND (decl, 0);
11634 if (DECL_P (decl))
11635 {
11636 n = splay_tree_lookup (ctx->variables,
11637 (splay_tree_key) decl);
11638 if (!(n->value & GOVD_SEEN))
11639 remove = true;
11640 }
11641 }
11642 }
11643 break;
11644 }
11645 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
11646 if ((ctx->region_type & ORT_TARGET) != 0
11647 && !(n->value & GOVD_SEEN)
11648 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
11649 && (!is_global_var (decl)
11650 || !lookup_attribute ("omp declare target link",
11651 DECL_ATTRIBUTES (decl))))
11652 {
11653 remove = true;
11654 /* For struct element mappings, if the struct is never referenced
11655 in the target block and none of the mappings has an always
11656 modifier, remove all the struct element mappings, which
11657 immediately follow the GOMP_MAP_STRUCT map clause.  */
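/* E.g. (illustrative) with map(s.a, s.b) and S unused in the region,
the GOMP_MAP_STRUCT clause and both element maps chained after it
are dropped.  */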
11658 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
11659 {
11660 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
11661 while (cnt--)
11662 OMP_CLAUSE_CHAIN (c)
11663 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
11664 }
11665 }
11666 else if (DECL_SIZE (decl)
11667 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
11668 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
11669 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
11670 && (OMP_CLAUSE_MAP_KIND (c)
11671 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
11672 {
11673 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
11674 for these, TREE_CODE (DECL_SIZE (decl)) will always be
11675 INTEGER_CST. */
11676 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
11677
11678 tree decl2 = DECL_VALUE_EXPR (decl);
11679 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
11680 decl2 = TREE_OPERAND (decl2, 0);
11681 gcc_assert (DECL_P (decl2));
11682 tree mem = build_simple_mem_ref (decl2);
11683 OMP_CLAUSE_DECL (c) = mem;
11684 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
11685 if (ctx->outer_context)
11686 {
11687 omp_notice_variable (ctx->outer_context, decl2, true);
11688 omp_notice_variable (ctx->outer_context,
11689 OMP_CLAUSE_SIZE (c), true);
11690 }
11691 if (((ctx->region_type & ORT_TARGET) != 0
11692 || !ctx->target_firstprivatize_array_bases)
11693 && ((n->value & GOVD_SEEN) == 0
11694 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
11695 {
11696 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
11697 OMP_CLAUSE_MAP);
11698 OMP_CLAUSE_DECL (nc) = decl;
11699 OMP_CLAUSE_SIZE (nc) = size_zero_node;
11700 if (ctx->target_firstprivatize_array_bases)
11701 OMP_CLAUSE_SET_MAP_KIND (nc,
11702 GOMP_MAP_FIRSTPRIVATE_POINTER);
11703 else
11704 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
11705 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
11706 OMP_CLAUSE_CHAIN (c) = nc;
11707 c = nc;
11708 }
11709 }
11710 else
11711 {
11712 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
11713 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
11714 gcc_assert ((n->value & GOVD_SEEN) == 0
11715 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
11716 == 0));
11717 }
11718 break;
11719
11720 case OMP_CLAUSE_TO:
11721 case OMP_CLAUSE_FROM:
11722 case OMP_CLAUSE__CACHE_:
11723 decl = OMP_CLAUSE_DECL (c);
11724 if (!DECL_P (decl))
11725 break;
11726 if (DECL_SIZE (decl)
11727 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
11728 {
11729 tree decl2 = DECL_VALUE_EXPR (decl);
11730 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
11731 decl2 = TREE_OPERAND (decl2, 0);
11732 gcc_assert (DECL_P (decl2));
11733 tree mem = build_simple_mem_ref (decl2);
11734 OMP_CLAUSE_DECL (c) = mem;
11735 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
11736 if (ctx->outer_context)
11737 {
11738 omp_notice_variable (ctx->outer_context, decl2, true);
11739 omp_notice_variable (ctx->outer_context,
11740 OMP_CLAUSE_SIZE (c), true);
11741 }
11742 }
11743 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
11744 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
11745 break;
11746
11747 case OMP_CLAUSE_REDUCTION:
11748 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
11749 {
11750 decl = OMP_CLAUSE_DECL (c);
11751 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
11752 if ((n->value & GOVD_REDUCTION_INSCAN) == 0)
11753 {
11754 remove = true;
11755 error_at (OMP_CLAUSE_LOCATION (c),
11756 "%qD specified in %<inscan%> %<reduction%> clause "
11757 "but not in %<scan%> directive clause", decl);
11758 break;
11759 }
11760 has_inscan_reductions = true;
11761 }
11762 /* FALLTHRU */
11763 case OMP_CLAUSE_IN_REDUCTION:
11764 case OMP_CLAUSE_TASK_REDUCTION:
11765 decl = OMP_CLAUSE_DECL (c);
11766 /* OpenACC reductions need a present_or_copy data clause.
11767 Add one if necessary.  Emit an error when the reduction is private.  */
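/* E.g. (illustrative) "#pragma acc parallel reduction(+:s)" with no
data clause for S gets an implicit tofrom (present_or_copy) map
added below.  */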
11768 if (ctx->region_type == ORT_ACC_PARALLEL
11769 || ctx->region_type == ORT_ACC_SERIAL)
11770 {
11771 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
11772 if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
11773 {
11774 remove = true;
11775 error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
11776 "reduction on %qE", DECL_NAME (decl));
11777 }
11778 else if ((n->value & GOVD_MAP) == 0)
11779 {
11780 tree next = OMP_CLAUSE_CHAIN (c);
11781 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
11782 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
11783 OMP_CLAUSE_DECL (nc) = decl;
11784 OMP_CLAUSE_CHAIN (c) = nc;
11785 lang_hooks.decls.omp_finish_clause (nc, pre_p,
11786 (ctx->region_type
11787 & ORT_ACC) != 0);
11788 while (1)
11789 {
11790 OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
11791 if (OMP_CLAUSE_CHAIN (nc) == NULL)
11792 break;
11793 nc = OMP_CLAUSE_CHAIN (nc);
11794 }
11795 OMP_CLAUSE_CHAIN (nc) = next;
11796 n->value |= GOVD_MAP;
11797 }
11798 }
11799 if (DECL_P (decl)
11800 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
11801 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
11802 break;
11803
11804 case OMP_CLAUSE_ALLOCATE:
11805 decl = OMP_CLAUSE_DECL (c);
11806 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
11807 if (n != NULL && !(n->value & GOVD_SEEN))
11808 {
11809 if ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE | GOVD_LINEAR))
11810 != 0
11811 && (n->value & (GOVD_REDUCTION | GOVD_LASTPRIVATE)) == 0)
11812 remove = true;
11813 }
11814 if (!remove
11815 && OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
11816 && TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)) != INTEGER_CST
11817 && ((ctx->region_type & (ORT_PARALLEL | ORT_TARGET)) != 0
11818 || (ctx->region_type & ORT_TASKLOOP) == ORT_TASK
11819 || (ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS))
11820 {
11821 tree allocator = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
11822 n = splay_tree_lookup (ctx->variables, (splay_tree_key) allocator);
11823 if (n == NULL)
11824 {
11825 enum omp_clause_default_kind default_kind
11826 = ctx->default_kind;
11827 ctx->default_kind = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
11828 omp_notice_variable (ctx, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
11829 true);
11830 ctx->default_kind = default_kind;
11831 }
11832 else
11833 omp_notice_variable (ctx, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
11834 true);
11835 }
11836 break;
11837
11838 case OMP_CLAUSE_COPYIN:
11839 case OMP_CLAUSE_COPYPRIVATE:
11840 case OMP_CLAUSE_IF:
11841 case OMP_CLAUSE_NUM_THREADS:
11842 case OMP_CLAUSE_NUM_TEAMS:
11843 case OMP_CLAUSE_THREAD_LIMIT:
11844 case OMP_CLAUSE_DIST_SCHEDULE:
11845 case OMP_CLAUSE_DEVICE:
11846 case OMP_CLAUSE_SCHEDULE:
11847 case OMP_CLAUSE_NOWAIT:
11848 case OMP_CLAUSE_ORDERED:
11849 case OMP_CLAUSE_DEFAULT:
11850 case OMP_CLAUSE_UNTIED:
11851 case OMP_CLAUSE_COLLAPSE:
11852 case OMP_CLAUSE_FINAL:
11853 case OMP_CLAUSE_MERGEABLE:
11854 case OMP_CLAUSE_PROC_BIND:
11855 case OMP_CLAUSE_SAFELEN:
11856 case OMP_CLAUSE_SIMDLEN:
11857 case OMP_CLAUSE_DEPEND:
11858 case OMP_CLAUSE_PRIORITY:
11859 case OMP_CLAUSE_GRAINSIZE:
11860 case OMP_CLAUSE_NUM_TASKS:
11861 case OMP_CLAUSE_NOGROUP:
11862 case OMP_CLAUSE_THREADS:
11863 case OMP_CLAUSE_SIMD:
11864 case OMP_CLAUSE_FILTER:
11865 case OMP_CLAUSE_HINT:
11866 case OMP_CLAUSE_DEFAULTMAP:
11867 case OMP_CLAUSE_ORDER:
11868 case OMP_CLAUSE_BIND:
11869 case OMP_CLAUSE_DETACH:
11870 case OMP_CLAUSE_USE_DEVICE_PTR:
11871 case OMP_CLAUSE_USE_DEVICE_ADDR:
11872 case OMP_CLAUSE_ASYNC:
11873 case OMP_CLAUSE_WAIT:
11874 case OMP_CLAUSE_INDEPENDENT:
11875 case OMP_CLAUSE_NUM_GANGS:
11876 case OMP_CLAUSE_NUM_WORKERS:
11877 case OMP_CLAUSE_VECTOR_LENGTH:
11878 case OMP_CLAUSE_GANG:
11879 case OMP_CLAUSE_WORKER:
11880 case OMP_CLAUSE_VECTOR:
11881 case OMP_CLAUSE_AUTO:
11882 case OMP_CLAUSE_SEQ:
11883 case OMP_CLAUSE_TILE:
11884 case OMP_CLAUSE_IF_PRESENT:
11885 case OMP_CLAUSE_FINALIZE:
11886 case OMP_CLAUSE_INCLUSIVE:
11887 case OMP_CLAUSE_EXCLUSIVE:
11888 break;
11889
11890 case OMP_CLAUSE_NOHOST:
11891 default:
11892 gcc_unreachable ();
11893 }
11894
11895 if (remove)
11896 *list_p = OMP_CLAUSE_CHAIN (c);
11897 else
11898 list_p = &OMP_CLAUSE_CHAIN (c);
11899 }
11900
11901 /* Add in any implicit data sharing. */
11902 struct gimplify_adjust_omp_clauses_data data;
11903 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0)
11904 {
11905 /* OpenMP. Implicit clauses are added at the start of the clause list,
11906 but after any non-map clauses. */
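/* Illustratively, with "shared(a) map(b)" the implicit clauses end up
between the SHARED clause and the first MAP clause.  */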
11907 tree *implicit_add_list_p = orig_list_p;
11908 while (*implicit_add_list_p
11909 && OMP_CLAUSE_CODE (*implicit_add_list_p) != OMP_CLAUSE_MAP)
11910 implicit_add_list_p = &OMP_CLAUSE_CHAIN (*implicit_add_list_p);
11911 data.list_p = implicit_add_list_p;
11912 }
11913 else
11914 /* OpenACC. */
11915 data.list_p = list_p;
11916 data.pre_p = pre_p;
11917 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
11918
11919 if (has_inscan_reductions)
11920 for (c = *orig_list_p; c; c = OMP_CLAUSE_CHAIN (c))
11921 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11922 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
11923 {
11924 error_at (OMP_CLAUSE_LOCATION (c),
11925 "%<inscan%> %<reduction%> clause used together with "
11926 "%<linear%> clause for a variable other than loop "
11927 "iterator");
11928 break;
11929 }
11930
11931 gimplify_omp_ctxp = ctx->outer_context;
11932 delete_omp_context (ctx);
11933 }
11934
11935 /* Return 0 if the CONSTRUCTS selectors don't match the OpenMP context,
11936 1 if they do, and -1 if this is not known yet (simd is involved and
11937 won't be known until vectorization).  If SCORES is non-NULL, it should
11938 point to an array of at least 2*NCONSTRUCTS+2 ints, which will be
11939 filled with the positions of the CONSTRUCTS (position -1 if one will
11940 never match) followed by the number of constructs in the OpenMP
11941 context construct trait.  If the score depends on whether the code
11942 will be in a declare simd clone or not, the function returns 2 and
11943 there will be two sets of scores, the first for the case that it is
11944 not in a declare simd clone, the other for the case that it is.  */
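/* For instance (illustrative): with NCONSTRUCTS == 2 and no declare
simd clone involved, SCORES receives a single set of values: the two
construct positions (-1 for a construct that will never match)
followed by the number of constructs in the context trait, and the
function returns 1.  */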
11945
11946 int
11947 omp_construct_selector_matches (enum tree_code *constructs, int nconstructs,
11948 int *scores)
11949 {
11950 int matched = 0, cnt = 0;
11951 bool simd_seen = false;
11952 bool target_seen = false;
11953 int declare_simd_cnt = -1;
11954 auto_vec<enum tree_code, 16> codes;
11955 for (struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; ctx;)
11956 {
11957 if (((ctx->region_type & ORT_PARALLEL) && ctx->code == OMP_PARALLEL)
11958 || ((ctx->region_type & (ORT_TARGET | ORT_IMPLICIT_TARGET | ORT_ACC))
11959 == ORT_TARGET && ctx->code == OMP_TARGET)
11960 || ((ctx->region_type & ORT_TEAMS) && ctx->code == OMP_TEAMS)
11961 || (ctx->region_type == ORT_WORKSHARE && ctx->code == OMP_FOR)
11962 || (ctx->region_type == ORT_SIMD
11963 && ctx->code == OMP_SIMD
11964 && !omp_find_clause (ctx->clauses, OMP_CLAUSE_BIND)))
11965 {
11966 ++cnt;
11967 if (scores)
11968 codes.safe_push (ctx->code);
11969 else if (matched < nconstructs && ctx->code == constructs[matched])
11970 {
11971 if (ctx->code == OMP_SIMD)
11972 {
11973 if (matched)
11974 return 0;
11975 simd_seen = true;
11976 }
11977 ++matched;
11978 }
11979 if (ctx->code == OMP_TARGET)
11980 {
11981 if (scores == NULL)
11982 return matched < nconstructs ? 0 : simd_seen ? -1 : 1;
11983 target_seen = true;
11984 break;
11985 }
11986 }
11987 else if (ctx->region_type == ORT_WORKSHARE
11988 && ctx->code == OMP_LOOP
11989 && ctx->outer_context
11990 && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL
11991 && ctx->outer_context->outer_context
11992 && ctx->outer_context->outer_context->code == OMP_LOOP
11993 && ctx->outer_context->outer_context->distribute)
11994 ctx = ctx->outer_context->outer_context;
11995 ctx = ctx->outer_context;
11996 }
11997 if (!target_seen
11998 && lookup_attribute ("omp declare simd",
11999 DECL_ATTRIBUTES (current_function_decl)))
12000 {
12001       /* Declare simd is a maybe case; it is supposed to be added only to
12002 	 the clones created by omp-simd-clone.cc, not to the base function.  */
12003 declare_simd_cnt = cnt++;
12004 if (scores)
12005 codes.safe_push (OMP_SIMD);
12006       else if (declare_simd_cnt == 0
12007 && constructs[0] == OMP_SIMD)
12008 {
12009 gcc_assert (matched == 0);
12010 simd_seen = true;
12011 if (++matched == nconstructs)
12012 return -1;
12013 }
12014 }
12015 if (tree attr = lookup_attribute ("omp declare variant variant",
12016 DECL_ATTRIBUTES (current_function_decl)))
12017 {
12018 enum tree_code variant_constructs[5];
12019 int variant_nconstructs = 0;
12020 if (!target_seen)
12021 variant_nconstructs
12022 = omp_constructor_traits_to_codes (TREE_VALUE (attr),
12023 variant_constructs);
12024 for (int i = 0; i < variant_nconstructs; i++)
12025 {
12026 ++cnt;
12027 if (scores)
12028 codes.safe_push (variant_constructs[i]);
12029 else if (matched < nconstructs
12030 && variant_constructs[i] == constructs[matched])
12031 {
12032 if (variant_constructs[i] == OMP_SIMD)
12033 {
12034 if (matched)
12035 return 0;
12036 simd_seen = true;
12037 }
12038 ++matched;
12039 }
12040 }
12041 }
12042 if (!target_seen
12043 && lookup_attribute ("omp declare target block",
12044 DECL_ATTRIBUTES (current_function_decl)))
12045 {
12046 if (scores)
12047 codes.safe_push (OMP_TARGET);
12048 else if (matched < nconstructs && constructs[matched] == OMP_TARGET)
12049 ++matched;
12050 }
12051 if (scores)
12052 {
12053 for (int pass = 0; pass < (declare_simd_cnt == -1 ? 1 : 2); pass++)
12054 {
12055 int j = codes.length () - 1;
12056 for (int i = nconstructs - 1; i >= 0; i--)
12057 {
12058 while (j >= 0
12059 && (pass != 0 || declare_simd_cnt != j)
12060 && constructs[i] != codes[j])
12061 --j;
12062 if (pass == 0 && declare_simd_cnt != -1 && j > declare_simd_cnt)
12063 *scores++ = j - 1;
12064 else
12065 *scores++ = j;
12066 }
12067 *scores++ = ((pass == 0 && declare_simd_cnt != -1)
12068 ? codes.length () - 1 : codes.length ());
12069 }
12070 return declare_simd_cnt == -1 ? 1 : 2;
12071 }
12072 if (matched == nconstructs)
12073 return simd_seen ? -1 : 1;
12074 return 0;
12075 }
12076
12077 /* Gimplify OACC_CACHE. */
12078
12079 static void
12080 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
12081 {
12082 tree expr = *expr_p;
12083
12084 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
12085 OACC_CACHE);
12086 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
12087 OACC_CACHE);
12088
12089 /* TODO: Do something sensible with this information. */
12090
12091 *expr_p = NULL_TREE;
12092 }
12093
12094 /* Helper function of gimplify_oacc_declare.  If required, translate the
12095    map 'kind' in CLAUSE into separate 'entry' and 'exit' kinds.  The entry
12096    kind will replace the one in CLAUSE, while the exit kind will be used
12097    in a new omp_clause and returned to the caller.  */
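/* For instance, GOMP_MAP_TOFROM is split into a GOMP_MAP_TO entry kind
   (stored back into CLAUSE) and a GOMP_MAP_FROM exit kind returned in a
   new clause, while GOMP_MAP_ALLOC keeps its entry kind and pairs with a
   GOMP_MAP_RELEASE exit clause.  */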
12098
12099 static tree
12100 gimplify_oacc_declare_1 (tree clause)
12101 {
12102 HOST_WIDE_INT kind, new_op;
12103 bool ret = false;
12104 tree c = NULL;
12105
12106 kind = OMP_CLAUSE_MAP_KIND (clause);
12107
12108 switch (kind)
12109 {
12110 case GOMP_MAP_ALLOC:
12111 new_op = GOMP_MAP_RELEASE;
12112 ret = true;
12113 break;
12114
12115 case GOMP_MAP_FROM:
12116 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
12117 new_op = GOMP_MAP_FROM;
12118 ret = true;
12119 break;
12120
12121 case GOMP_MAP_TOFROM:
12122 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
12123 new_op = GOMP_MAP_FROM;
12124 ret = true;
12125 break;
12126
12127 case GOMP_MAP_DEVICE_RESIDENT:
12128 case GOMP_MAP_FORCE_DEVICEPTR:
12129 case GOMP_MAP_FORCE_PRESENT:
12130 case GOMP_MAP_LINK:
12131 case GOMP_MAP_POINTER:
12132 case GOMP_MAP_TO:
12133 break;
12134
12135 default:
12136 gcc_unreachable ();
12137 break;
12138 }
12139
12140 if (ret)
12141 {
12142 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
12143 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
12144 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
12145 }
12146
12147 return c;
12148 }
12149
12150 /* Gimplify OACC_DECLARE. */
12151
12152 static void
12153 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
12154 {
12155 tree expr = *expr_p;
12156 gomp_target *stmt;
12157 tree clauses, t, decl;
12158
12159 clauses = OACC_DECLARE_CLAUSES (expr);
12160
12161 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
12162 gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);
12163
12164 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
12165 {
12166 decl = OMP_CLAUSE_DECL (t);
12167
12168 if (TREE_CODE (decl) == MEM_REF)
12169 decl = TREE_OPERAND (decl, 0);
12170
12171 if (VAR_P (decl) && !is_oacc_declared (decl))
12172 {
12173 tree attr = get_identifier ("oacc declare target");
12174 DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
12175 DECL_ATTRIBUTES (decl));
12176 }
12177
12178 if (VAR_P (decl)
12179 && !is_global_var (decl)
12180 && DECL_CONTEXT (decl) == current_function_decl)
12181 {
12182 tree c = gimplify_oacc_declare_1 (t);
12183 if (c)
12184 {
12185 if (oacc_declare_returns == NULL)
12186 oacc_declare_returns = new hash_map<tree, tree>;
12187
12188 oacc_declare_returns->put (decl, c);
12189 }
12190 }
12191
12192 if (gimplify_omp_ctxp)
12193 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
12194 }
12195
12196 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
12197 clauses);
12198
12199 gimplify_seq_add_stmt (pre_p, stmt);
12200
12201 *expr_p = NULL_TREE;
12202 }
12203
12204 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
12205 gimplification of the body, as well as scanning the body for used
12206 variables. We need to do this scan now, because variable-sized
12207 decls will be decomposed during gimplification. */
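/* (A sketch of the motivation: a variable-length array declared in the
   parallel body is lowered during gimplification into a pointer plus an
   explicit allocation, and it is that pointer which the data-sharing
   machinery then needs to see.)  */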
12208
12209 static void
12210 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
12211 {
12212 tree expr = *expr_p;
12213 gimple *g;
12214 gimple_seq body = NULL;
12215
12216 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
12217 OMP_PARALLEL_COMBINED (expr)
12218 ? ORT_COMBINED_PARALLEL
12219 : ORT_PARALLEL, OMP_PARALLEL);
12220
12221 push_gimplify_context ();
12222
12223 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
12224 if (gimple_code (g) == GIMPLE_BIND)
12225 pop_gimplify_context (g);
12226 else
12227 pop_gimplify_context (NULL);
12228
12229 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
12230 OMP_PARALLEL);
12231
12232 g = gimple_build_omp_parallel (body,
12233 OMP_PARALLEL_CLAUSES (expr),
12234 NULL_TREE, NULL_TREE);
12235 if (OMP_PARALLEL_COMBINED (expr))
12236 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
12237 gimplify_seq_add_stmt (pre_p, g);
12238 *expr_p = NULL_TREE;
12239 }
12240
12241 /* Gimplify the contents of an OMP_TASK statement. This involves
12242 gimplification of the body, as well as scanning the body for used
12243 variables. We need to do this scan now, because variable-sized
12244 decls will be decomposed during gimplification. */
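/* (Note: an OMP_TASK with a NULL body represents "#pragma omp taskwait"
   with depend clauses; the taskwait_p flag is set on the generated
   GIMPLE_OMP_TASK at the end of this function for that case.)  */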
12245
12246 static void
12247 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
12248 {
12249 tree expr = *expr_p;
12250 gimple *g;
12251 gimple_seq body = NULL;
12252
12253 if (OMP_TASK_BODY (expr) == NULL_TREE)
12254 for (tree c = OMP_TASK_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
12255 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
12256 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_MUTEXINOUTSET)
12257 {
12258 error_at (OMP_CLAUSE_LOCATION (c),
12259 "%<mutexinoutset%> kind in %<depend%> clause on a "
12260 "%<taskwait%> construct");
12261 break;
12262 }
12263
12264 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
12265 omp_find_clause (OMP_TASK_CLAUSES (expr),
12266 OMP_CLAUSE_UNTIED)
12267 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
12268
12269 if (OMP_TASK_BODY (expr))
12270 {
12271 push_gimplify_context ();
12272
12273 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
12274 if (gimple_code (g) == GIMPLE_BIND)
12275 pop_gimplify_context (g);
12276 else
12277 pop_gimplify_context (NULL);
12278 }
12279
12280 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
12281 OMP_TASK);
12282
12283 g = gimple_build_omp_task (body,
12284 OMP_TASK_CLAUSES (expr),
12285 NULL_TREE, NULL_TREE,
12286 NULL_TREE, NULL_TREE, NULL_TREE);
12287 if (OMP_TASK_BODY (expr) == NULL_TREE)
12288 gimple_omp_task_set_taskwait_p (g, true);
12289 gimplify_seq_add_stmt (pre_p, g);
12290 *expr_p = NULL_TREE;
12291 }
12292
12293 /* Helper function for gimplify_omp_for. If *TP is not a gimple constant,
12294 force it into a temporary initialized in PRE_P and add firstprivate clause
12295 to ORIG_FOR_STMT. */
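/* E.g. (an illustrative sketch): for "#pragma omp taskloop" with an upper
   bound of N * 2, the expression is evaluated into a temporary before the
   construct and a firstprivate clause for that temporary is added to
   ORIG_FOR_STMT, so the generated task sees a stable copy of the value.  */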
12296
12297 static void
12298 gimplify_omp_taskloop_expr (tree type, tree *tp, gimple_seq *pre_p,
12299 tree orig_for_stmt)
12300 {
12301 if (*tp == NULL || is_gimple_constant (*tp))
12302 return;
12303
12304 *tp = get_initialized_tmp_var (*tp, pre_p, NULL, false);
12305   /* A reference-to-pointer conversion is normally considered useless,
12306      but it is significant for the firstprivate clause.  Force the
12307      conversion here.  */
12308 if (type
12309 && TREE_CODE (type) == POINTER_TYPE
12310 && TREE_CODE (TREE_TYPE (*tp)) == REFERENCE_TYPE)
12311 {
12312 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
12313 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v, *tp);
12314 gimplify_and_add (m, pre_p);
12315 *tp = v;
12316 }
12317
12318 tree c = build_omp_clause (input_location, OMP_CLAUSE_FIRSTPRIVATE);
12319 OMP_CLAUSE_DECL (c) = *tp;
12320 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
12321 OMP_FOR_CLAUSES (orig_for_stmt) = c;
12322 }
12323
12324 /* Gimplify the gross structure of an OMP_FOR statement. */
12325
12326 static enum gimplify_status
12327 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
12328 {
12329 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
12330 enum gimplify_status ret = GS_ALL_DONE;
12331 enum gimplify_status tret;
12332 gomp_for *gfor;
12333 gimple_seq for_body, for_pre_body;
12334 int i;
12335 bitmap has_decl_expr = NULL;
12336 enum omp_region_type ort = ORT_WORKSHARE;
12337 bool openacc = TREE_CODE (*expr_p) == OACC_LOOP;
12338
12339 orig_for_stmt = for_stmt = *expr_p;
12340
12341 bool loop_p = (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_BIND)
12342 != NULL_TREE);
12343 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
12344 {
12345 tree *data[4] = { NULL, NULL, NULL, NULL };
12346 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
12347 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
12348 find_combined_omp_for, data, NULL);
12349 if (inner_for_stmt == NULL_TREE)
12350 {
12351 gcc_assert (seen_error ());
12352 *expr_p = NULL_TREE;
12353 return GS_ERROR;
12354 }
12355 if (data[2] && OMP_FOR_PRE_BODY (*data[2]))
12356 {
12357 append_to_statement_list_force (OMP_FOR_PRE_BODY (*data[2]),
12358 &OMP_FOR_PRE_BODY (for_stmt));
12359 OMP_FOR_PRE_BODY (*data[2]) = NULL_TREE;
12360 }
12361 if (OMP_FOR_PRE_BODY (inner_for_stmt))
12362 {
12363 append_to_statement_list_force (OMP_FOR_PRE_BODY (inner_for_stmt),
12364 &OMP_FOR_PRE_BODY (for_stmt));
12365 OMP_FOR_PRE_BODY (inner_for_stmt) = NULL_TREE;
12366 }
12367
12368 if (data[0])
12369 {
12370 /* We have some statements or variable declarations in between
12371 the composite construct directives. Move them around the
12372 inner_for_stmt. */
12373 data[0] = expr_p;
12374 for (i = 0; i < 3; i++)
12375 if (data[i])
12376 {
12377 tree t = *data[i];
12378 if (i < 2 && data[i + 1] == &OMP_BODY (t))
12379 data[i + 1] = data[i];
12380 *data[i] = OMP_BODY (t);
12381 tree body = build3 (BIND_EXPR, void_type_node, NULL_TREE,
12382 NULL_TREE, make_node (BLOCK));
12383 OMP_BODY (t) = body;
12384 append_to_statement_list_force (inner_for_stmt,
12385 &BIND_EXPR_BODY (body));
12386 *data[3] = t;
12387 data[3] = tsi_stmt_ptr (tsi_start (BIND_EXPR_BODY (body)));
12388 gcc_assert (*data[3] == inner_for_stmt);
12389 }
12390 return GS_OK;
12391 }
12392
12393 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
12394 if (!loop_p
12395 && OMP_FOR_ORIG_DECLS (inner_for_stmt)
12396 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
12397 i)) == TREE_LIST
12398 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
12399 i)))
12400 {
12401 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
12402 /* Class iterators aren't allowed on OMP_SIMD, so the only
12403 case we need to solve is distribute parallel for. They are
12404 allowed on the loop construct, but that is already handled
12405 in gimplify_omp_loop. */
12406 gcc_assert (TREE_CODE (inner_for_stmt) == OMP_FOR
12407 && TREE_CODE (for_stmt) == OMP_DISTRIBUTE
12408 && data[1]);
12409 tree orig_decl = TREE_PURPOSE (orig);
12410 tree last = TREE_VALUE (orig);
12411 tree *pc;
12412 for (pc = &OMP_FOR_CLAUSES (inner_for_stmt);
12413 *pc; pc = &OMP_CLAUSE_CHAIN (*pc))
12414 if ((OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE
12415 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LASTPRIVATE)
12416 && OMP_CLAUSE_DECL (*pc) == orig_decl)
12417 break;
12418 if (*pc == NULL_TREE)
12419 {
12420 tree *spc;
12421 for (spc = &OMP_PARALLEL_CLAUSES (*data[1]);
12422 *spc; spc = &OMP_CLAUSE_CHAIN (*spc))
12423 if (OMP_CLAUSE_CODE (*spc) == OMP_CLAUSE_PRIVATE
12424 && OMP_CLAUSE_DECL (*spc) == orig_decl)
12425 break;
12426 if (*spc)
12427 {
12428 tree c = *spc;
12429 *spc = OMP_CLAUSE_CHAIN (c);
12430 OMP_CLAUSE_CHAIN (c) = NULL_TREE;
12431 *pc = c;
12432 }
12433 }
12434 if (*pc == NULL_TREE)
12435 ;
12436 else if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE)
12437 {
12438 /* private clause will appear only on inner_for_stmt.
12439 Change it into firstprivate, and add private clause
12440 on for_stmt. */
12441 tree c = copy_node (*pc);
12442 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
12443 OMP_FOR_CLAUSES (for_stmt) = c;
12444 OMP_CLAUSE_CODE (*pc) = OMP_CLAUSE_FIRSTPRIVATE;
12445 lang_hooks.decls.omp_finish_clause (*pc, pre_p, openacc);
12446 }
12447 else
12448 {
12449 /* lastprivate clause will appear on both inner_for_stmt
12450 and for_stmt. Add firstprivate clause to
12451 inner_for_stmt. */
12452 tree c = build_omp_clause (OMP_CLAUSE_LOCATION (*pc),
12453 OMP_CLAUSE_FIRSTPRIVATE);
12454 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (*pc);
12455 OMP_CLAUSE_CHAIN (c) = *pc;
12456 *pc = c;
12457 lang_hooks.decls.omp_finish_clause (*pc, pre_p, openacc);
12458 }
12459 tree c = build_omp_clause (UNKNOWN_LOCATION,
12460 OMP_CLAUSE_FIRSTPRIVATE);
12461 OMP_CLAUSE_DECL (c) = last;
12462 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
12463 OMP_PARALLEL_CLAUSES (*data[1]) = c;
12464 c = build_omp_clause (UNKNOWN_LOCATION,
12465 *pc ? OMP_CLAUSE_SHARED
12466 : OMP_CLAUSE_FIRSTPRIVATE);
12467 OMP_CLAUSE_DECL (c) = orig_decl;
12468 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
12469 OMP_PARALLEL_CLAUSES (*data[1]) = c;
12470 }
12471       /* Similarly, take care of C++ range-for temporaries; those should
12472 	 be firstprivate on the OMP_PARALLEL if there is one.  */
12473 if (data[1])
12474 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
12475 if (OMP_FOR_ORIG_DECLS (inner_for_stmt)
12476 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
12477 i)) == TREE_LIST
12478 && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
12479 i)))
12480 {
12481 tree orig
12482 = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
12483 tree v = TREE_CHAIN (orig);
12484 tree c = build_omp_clause (UNKNOWN_LOCATION,
12485 OMP_CLAUSE_FIRSTPRIVATE);
12486 /* First add firstprivate clause for the __for_end artificial
12487 decl. */
12488 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 1);
12489 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
12490 == REFERENCE_TYPE)
12491 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
12492 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
12493 OMP_PARALLEL_CLAUSES (*data[1]) = c;
12494 if (TREE_VEC_ELT (v, 0))
12495 {
12496 /* And now the same for __for_range artificial decl if it
12497 exists. */
12498 c = build_omp_clause (UNKNOWN_LOCATION,
12499 OMP_CLAUSE_FIRSTPRIVATE);
12500 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 0);
12501 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
12502 == REFERENCE_TYPE)
12503 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
12504 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
12505 OMP_PARALLEL_CLAUSES (*data[1]) = c;
12506 }
12507 }
12508 }
12509
12510 switch (TREE_CODE (for_stmt))
12511 {
12512 case OMP_FOR:
12513 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt))
12514 {
12515 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
12516 OMP_CLAUSE_SCHEDULE))
12517 error_at (EXPR_LOCATION (for_stmt),
12518 "%qs clause may not appear on non-rectangular %qs",
12519 "schedule", "for");
12520 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED))
12521 error_at (EXPR_LOCATION (for_stmt),
12522 "%qs clause may not appear on non-rectangular %qs",
12523 "ordered", "for");
12524 }
12525 break;
12526 case OMP_DISTRIBUTE:
12527 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt)
12528 && omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
12529 OMP_CLAUSE_DIST_SCHEDULE))
12530 error_at (EXPR_LOCATION (for_stmt),
12531 "%qs clause may not appear on non-rectangular %qs",
12532 "dist_schedule", "distribute");
12533 break;
12534 case OACC_LOOP:
12535 ort = ORT_ACC;
12536 break;
12537 case OMP_TASKLOOP:
12538 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
12539 ort = ORT_UNTIED_TASKLOOP;
12540 else
12541 ort = ORT_TASKLOOP;
12542 break;
12543 case OMP_SIMD:
12544 ort = ORT_SIMD;
12545 break;
12546 default:
12547 gcc_unreachable ();
12548 }
12549
12550 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
12551 clause for the IV. */
12552 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
12553 {
12554 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
12555 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
12556 decl = TREE_OPERAND (t, 0);
12557 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
12558 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
12559 && OMP_CLAUSE_DECL (c) == decl)
12560 {
12561 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
12562 break;
12563 }
12564 }
12565
12566 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
12567 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
12568 loop_p && TREE_CODE (for_stmt) != OMP_SIMD
12569 ? OMP_LOOP : TREE_CODE (for_stmt));
12570
12571 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
12572 gimplify_omp_ctxp->distribute = true;
12573
12574 /* Handle OMP_FOR_INIT. */
12575 for_pre_body = NULL;
12576 if ((ort == ORT_SIMD
12577 || (inner_for_stmt && TREE_CODE (inner_for_stmt) == OMP_SIMD))
12578 && OMP_FOR_PRE_BODY (for_stmt))
12579 {
12580 has_decl_expr = BITMAP_ALLOC (NULL);
12581 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
12582 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
12583 == VAR_DECL)
12584 {
12585 t = OMP_FOR_PRE_BODY (for_stmt);
12586 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
12587 }
12588 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
12589 {
12590 tree_stmt_iterator si;
12591 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
12592 tsi_next (&si))
12593 {
12594 t = tsi_stmt (si);
12595 if (TREE_CODE (t) == DECL_EXPR
12596 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
12597 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
12598 }
12599 }
12600 }
12601 if (OMP_FOR_PRE_BODY (for_stmt))
12602 {
12603 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
12604 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
12605 else
12606 {
12607 struct gimplify_omp_ctx ctx;
12608 memset (&ctx, 0, sizeof (ctx));
12609 ctx.region_type = ORT_NONE;
12610 gimplify_omp_ctxp = &ctx;
12611 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
12612 gimplify_omp_ctxp = NULL;
12613 }
12614 }
12615 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
12616
12617 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
12618 for_stmt = inner_for_stmt;
12619
12620 /* For taskloop, need to gimplify the start, end and step before the
12621 taskloop, outside of the taskloop omp context. */
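  /* (The bound and step expressions of a taskloop are supposed to be
     evaluated by the thread encountering the construct, before the
     explicit task is created, hence the gimplification outside of the
     taskloop context.)  */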
12622 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
12623 {
12624 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
12625 {
12626 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
12627 gimple_seq *for_pre_p = (gimple_seq_empty_p (for_pre_body)
12628 ? pre_p : &for_pre_body);
12629 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
12630 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
12631 {
12632 tree v = TREE_OPERAND (t, 1);
12633 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 1),
12634 for_pre_p, orig_for_stmt);
12635 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 2),
12636 for_pre_p, orig_for_stmt);
12637 }
12638 else
12639 gimplify_omp_taskloop_expr (type, &TREE_OPERAND (t, 1), for_pre_p,
12640 orig_for_stmt);
12641
12642 /* Handle OMP_FOR_COND. */
12643 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
12644 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
12645 {
12646 tree v = TREE_OPERAND (t, 1);
12647 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 1),
12648 for_pre_p, orig_for_stmt);
12649 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 2),
12650 for_pre_p, orig_for_stmt);
12651 }
12652 else
12653 gimplify_omp_taskloop_expr (type, &TREE_OPERAND (t, 1), for_pre_p,
12654 orig_for_stmt);
12655
12656 /* Handle OMP_FOR_INCR. */
12657 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
12658 if (TREE_CODE (t) == MODIFY_EXPR)
12659 {
12660 decl = TREE_OPERAND (t, 0);
12661 t = TREE_OPERAND (t, 1);
12662 tree *tp = &TREE_OPERAND (t, 1);
12663 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
12664 tp = &TREE_OPERAND (t, 0);
12665
12666 gimplify_omp_taskloop_expr (NULL_TREE, tp, for_pre_p,
12667 orig_for_stmt);
12668 }
12669 }
12670
12671 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
12672 OMP_TASKLOOP);
12673 }
12674
12675 if (orig_for_stmt != for_stmt)
12676 gimplify_omp_ctxp->combined_loop = true;
12677
12678 for_body = NULL;
12679 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
12680 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
12681 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
12682 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
12683
12684 tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
12685 bool is_doacross = false;
12686 if (c && OMP_CLAUSE_ORDERED_EXPR (c))
12687 {
12688 is_doacross = true;
12689 gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH
12690 (OMP_FOR_INIT (for_stmt))
12691 * 2);
12692 }
12693 int collapse = 1, tile = 0;
12694 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
12695 if (c)
12696 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
12697 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
12698 if (c)
12699 tile = list_length (OMP_CLAUSE_TILE_LIST (c));
12700 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ALLOCATE);
12701 hash_set<tree> *allocate_uids = NULL;
12702 if (c)
12703 {
12704 allocate_uids = new hash_set<tree>;
12705 for (; c; c = OMP_CLAUSE_CHAIN (c))
12706 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE)
12707 allocate_uids->add (OMP_CLAUSE_DECL (c));
12708 }
12709 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
12710 {
12711 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
12712 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
12713 decl = TREE_OPERAND (t, 0);
12714 gcc_assert (DECL_P (decl));
12715 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
12716 || POINTER_TYPE_P (TREE_TYPE (decl)));
12717 if (is_doacross)
12718 {
12719 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
12720 {
12721 tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
12722 if (TREE_CODE (orig_decl) == TREE_LIST)
12723 {
12724 orig_decl = TREE_PURPOSE (orig_decl);
12725 if (!orig_decl)
12726 orig_decl = decl;
12727 }
12728 gimplify_omp_ctxp->loop_iter_var.quick_push (orig_decl);
12729 }
12730 else
12731 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
12732 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
12733 }
12734
12735 if (for_stmt == orig_for_stmt)
12736 {
12737 tree orig_decl = decl;
12738 if (OMP_FOR_ORIG_DECLS (for_stmt))
12739 {
12740 	      orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
12741 if (TREE_CODE (orig_decl) == TREE_LIST)
12742 {
12743 orig_decl = TREE_PURPOSE (orig_decl);
12744 if (!orig_decl)
12745 orig_decl = decl;
12746 }
12747 }
12748 if (is_global_var (orig_decl) && DECL_THREAD_LOCAL_P (orig_decl))
12749 error_at (EXPR_LOCATION (for_stmt),
12750 "threadprivate iteration variable %qD", orig_decl);
12751 }
12752
12753 /* Make sure the iteration variable is private. */
12754 tree c = NULL_TREE;
12755 tree c2 = NULL_TREE;
12756 if (orig_for_stmt != for_stmt)
12757 {
12758 /* Preserve this information until we gimplify the inner simd. */
12759 if (has_decl_expr
12760 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
12761 TREE_PRIVATE (t) = 1;
12762 }
12763 else if (ort == ORT_SIMD)
12764 {
12765 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
12766 (splay_tree_key) decl);
12767 omp_is_private (gimplify_omp_ctxp, decl,
12768 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
12769 != 1));
12770 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
12771 {
12772 omp_notice_variable (gimplify_omp_ctxp, decl, true);
12773 if (n->value & GOVD_LASTPRIVATE_CONDITIONAL)
12774 for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
12775 OMP_CLAUSE_LASTPRIVATE);
12776 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
12777 OMP_CLAUSE_LASTPRIVATE))
12778 if (OMP_CLAUSE_DECL (c3) == decl)
12779 {
12780 warning_at (OMP_CLAUSE_LOCATION (c3), 0,
12781 "conditional %<lastprivate%> on loop "
12782 "iterator %qD ignored", decl);
12783 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
12784 n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
12785 }
12786 }
12787 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1 && !loop_p)
12788 {
12789 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
12790 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
12791 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
12792 if ((has_decl_expr
12793 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
12794 || TREE_PRIVATE (t))
12795 {
12796 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
12797 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
12798 }
12799 struct gimplify_omp_ctx *outer
12800 = gimplify_omp_ctxp->outer_context;
12801 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
12802 {
12803 if (outer->region_type == ORT_WORKSHARE
12804 && outer->combined_loop)
12805 {
12806 n = splay_tree_lookup (outer->variables,
12807 (splay_tree_key)decl);
12808 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
12809 {
12810 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
12811 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
12812 }
12813 else
12814 {
12815 struct gimplify_omp_ctx *octx = outer->outer_context;
12816 if (octx
12817 && octx->region_type == ORT_COMBINED_PARALLEL
12818 && octx->outer_context
12819 && (octx->outer_context->region_type
12820 == ORT_WORKSHARE)
12821 && octx->outer_context->combined_loop)
12822 {
12823 octx = octx->outer_context;
12824 n = splay_tree_lookup (octx->variables,
12825 (splay_tree_key)decl);
12826 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
12827 {
12828 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
12829 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
12830 }
12831 }
12832 }
12833 }
12834 }
12835
12836 OMP_CLAUSE_DECL (c) = decl;
12837 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
12838 OMP_FOR_CLAUSES (for_stmt) = c;
12839 omp_add_variable (gimplify_omp_ctxp, decl, flags);
12840 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
12841 omp_lastprivate_for_combined_outer_constructs (outer, decl,
12842 true);
12843 }
12844 else
12845 {
12846 bool lastprivate
12847 = (!has_decl_expr
12848 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
12849 if (TREE_PRIVATE (t))
12850 lastprivate = false;
12851 if (loop_p && OMP_FOR_ORIG_DECLS (for_stmt))
12852 {
12853 tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
12854 if (TREE_CODE (elt) == TREE_LIST && TREE_PURPOSE (elt))
12855 lastprivate = false;
12856 }
12857
12858 struct gimplify_omp_ctx *outer
12859 = gimplify_omp_ctxp->outer_context;
12860 if (outer && lastprivate)
12861 omp_lastprivate_for_combined_outer_constructs (outer, decl,
12862 true);
12863
12864 c = build_omp_clause (input_location,
12865 lastprivate ? OMP_CLAUSE_LASTPRIVATE
12866 : OMP_CLAUSE_PRIVATE);
12867 OMP_CLAUSE_DECL (c) = decl;
12868 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
12869 OMP_FOR_CLAUSES (for_stmt) = c;
12870 omp_add_variable (gimplify_omp_ctxp, decl,
12871 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
12872 | GOVD_EXPLICIT | GOVD_SEEN);
12873 c = NULL_TREE;
12874 }
12875 }
12876 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
12877 {
12878 omp_notice_variable (gimplify_omp_ctxp, decl, true);
12879 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
12880 (splay_tree_key) decl);
12881 if (n && (n->value & GOVD_LASTPRIVATE_CONDITIONAL))
12882 for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
12883 OMP_CLAUSE_LASTPRIVATE);
12884 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
12885 OMP_CLAUSE_LASTPRIVATE))
12886 if (OMP_CLAUSE_DECL (c3) == decl)
12887 {
12888 warning_at (OMP_CLAUSE_LOCATION (c3), 0,
12889 "conditional %<lastprivate%> on loop "
12890 "iterator %qD ignored", decl);
12891 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
12892 n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
12893 }
12894 }
12895 else
12896 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
12897
12898 /* If DECL is not a gimple register, create a temporary variable to act
12899 as an iteration counter. This is valid, since DECL cannot be
12900 modified in the body of the loop. Similarly for any iteration vars
12901 in simd with collapse > 1 where the iterator vars must be
12902 lastprivate. And similarly for vars mentioned in allocate clauses. */
12903 if (orig_for_stmt != for_stmt)
12904 var = decl;
12905 else if (!is_gimple_reg (decl)
12906 || (ort == ORT_SIMD
12907 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
12908 || (allocate_uids && allocate_uids->contains (decl)))
12909 {
12910 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12911 /* Make sure omp_add_variable is not called on it prematurely.
12912 We call it ourselves a few lines later. */
12913 gimplify_omp_ctxp = NULL;
12914 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
12915 gimplify_omp_ctxp = ctx;
12916 TREE_OPERAND (t, 0) = var;
12917
12918 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
12919
12920 if (ort == ORT_SIMD
12921 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
12922 {
12923 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
12924 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
12925 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
12926 OMP_CLAUSE_DECL (c2) = var;
12927 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
12928 OMP_FOR_CLAUSES (for_stmt) = c2;
12929 omp_add_variable (gimplify_omp_ctxp, var,
12930 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
12931 if (c == NULL_TREE)
12932 {
12933 c = c2;
12934 c2 = NULL_TREE;
12935 }
12936 }
12937 else
12938 omp_add_variable (gimplify_omp_ctxp, var,
12939 GOVD_PRIVATE | GOVD_SEEN);
12940 }
12941 else
12942 var = decl;
12943
12944 gimplify_omp_ctxp->in_for_exprs = true;
12945 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
12946 {
12947 tree lb = TREE_OPERAND (t, 1);
12948 tret = gimplify_expr (&TREE_VEC_ELT (lb, 1), &for_pre_body, NULL,
12949 is_gimple_val, fb_rvalue, false);
12950 ret = MIN (ret, tret);
12951 tret = gimplify_expr (&TREE_VEC_ELT (lb, 2), &for_pre_body, NULL,
12952 is_gimple_val, fb_rvalue, false);
12953 }
12954 else
12955 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
12956 is_gimple_val, fb_rvalue, false);
12957 gimplify_omp_ctxp->in_for_exprs = false;
12958 ret = MIN (ret, tret);
12959 if (ret == GS_ERROR)
12960 return ret;
12961
12962 /* Handle OMP_FOR_COND. */
12963 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
12964 gcc_assert (COMPARISON_CLASS_P (t));
12965 gcc_assert (TREE_OPERAND (t, 0) == decl);
12966
12967 gimplify_omp_ctxp->in_for_exprs = true;
12968 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
12969 {
12970 tree ub = TREE_OPERAND (t, 1);
12971 tret = gimplify_expr (&TREE_VEC_ELT (ub, 1), &for_pre_body, NULL,
12972 is_gimple_val, fb_rvalue, false);
12973 ret = MIN (ret, tret);
12974 tret = gimplify_expr (&TREE_VEC_ELT (ub, 2), &for_pre_body, NULL,
12975 is_gimple_val, fb_rvalue, false);
12976 }
12977 else
12978 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
12979 is_gimple_val, fb_rvalue, false);
12980 gimplify_omp_ctxp->in_for_exprs = false;
12981 ret = MIN (ret, tret);
12982
12983 /* Handle OMP_FOR_INCR. */
12984 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
12985 switch (TREE_CODE (t))
12986 {
12987 case PREINCREMENT_EXPR:
12988 case POSTINCREMENT_EXPR:
12989 {
12990 tree decl = TREE_OPERAND (t, 0);
12991 /* c_omp_for_incr_canonicalize_ptr() should have been
12992 called to massage things appropriately. */
12993 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
12994
12995 if (orig_for_stmt != for_stmt)
12996 break;
12997 t = build_int_cst (TREE_TYPE (decl), 1);
12998 if (c)
12999 OMP_CLAUSE_LINEAR_STEP (c) = t;
13000 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
13001 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
13002 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
13003 break;
13004 }
13005
13006 case PREDECREMENT_EXPR:
13007 case POSTDECREMENT_EXPR:
13008 /* c_omp_for_incr_canonicalize_ptr() should have been
13009 called to massage things appropriately. */
13010 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
13011 if (orig_for_stmt != for_stmt)
13012 break;
13013 t = build_int_cst (TREE_TYPE (decl), -1);
13014 if (c)
13015 OMP_CLAUSE_LINEAR_STEP (c) = t;
13016 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
13017 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
13018 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
13019 break;
13020
13021 case MODIFY_EXPR:
13022 gcc_assert (TREE_OPERAND (t, 0) == decl);
13023 TREE_OPERAND (t, 0) = var;
13024
13025 t = TREE_OPERAND (t, 1);
13026 switch (TREE_CODE (t))
13027 {
13028 case PLUS_EXPR:
13029 if (TREE_OPERAND (t, 1) == decl)
13030 {
13031 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
13032 TREE_OPERAND (t, 0) = var;
13033 break;
13034 }
13035
13036 /* Fallthru. */
13037 case MINUS_EXPR:
13038 case POINTER_PLUS_EXPR:
13039 gcc_assert (TREE_OPERAND (t, 0) == decl);
13040 TREE_OPERAND (t, 0) = var;
13041 break;
13042 default:
13043 gcc_unreachable ();
13044 }
13045
13046 gimplify_omp_ctxp->in_for_exprs = true;
13047 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
13048 is_gimple_val, fb_rvalue, false);
13049 ret = MIN (ret, tret);
13050 if (c)
13051 {
13052 tree step = TREE_OPERAND (t, 1);
13053 tree stept = TREE_TYPE (decl);
13054 if (POINTER_TYPE_P (stept))
13055 stept = sizetype;
13056 step = fold_convert (stept, step);
13057 if (TREE_CODE (t) == MINUS_EXPR)
13058 step = fold_build1 (NEGATE_EXPR, stept, step);
13059 OMP_CLAUSE_LINEAR_STEP (c) = step;
13060 if (step != TREE_OPERAND (t, 1))
13061 {
13062 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
13063 &for_pre_body, NULL,
13064 is_gimple_val, fb_rvalue, false);
13065 ret = MIN (ret, tret);
13066 }
13067 }
13068 gimplify_omp_ctxp->in_for_exprs = false;
13069 break;
13070
13071 default:
13072 gcc_unreachable ();
13073 }
13074
13075 if (c2)
13076 {
13077 gcc_assert (c);
13078 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
13079 }
13080
13081 if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
13082 {
13083 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
13084 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
13085 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
13086 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
13087 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
13088 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
13089 && OMP_CLAUSE_DECL (c) == decl)
13090 {
13091 if (is_doacross && (collapse == 1 || i >= collapse))
13092 t = var;
13093 else
13094 {
13095 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
13096 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
13097 gcc_assert (TREE_OPERAND (t, 0) == var);
13098 t = TREE_OPERAND (t, 1);
13099 gcc_assert (TREE_CODE (t) == PLUS_EXPR
13100 || TREE_CODE (t) == MINUS_EXPR
13101 || TREE_CODE (t) == POINTER_PLUS_EXPR);
13102 gcc_assert (TREE_OPERAND (t, 0) == var);
13103 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
13104 is_doacross ? var : decl,
13105 TREE_OPERAND (t, 1));
13106 }
13107 gimple_seq *seq;
13108 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
13109 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
13110 else
13111 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
13112 push_gimplify_context ();
13113 gimplify_assign (decl, t, seq);
13114 gimple *bind = NULL;
13115 if (gimplify_ctxp->temps)
13116 {
13117 bind = gimple_build_bind (NULL_TREE, *seq, NULL_TREE);
13118 *seq = NULL;
13119 gimplify_seq_add_stmt (seq, bind);
13120 }
13121 pop_gimplify_context (bind);
13122 }
13123 }
13124 if (OMP_FOR_NON_RECTANGULAR (for_stmt) && var != decl)
13125 for (int j = i + 1; j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); j++)
13126 {
13127 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
13128 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
13129 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
13130 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
13131 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
13132 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
13133 gcc_assert (COMPARISON_CLASS_P (t));
13134 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
13135 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
13136 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
13137 }
13138 }
13139
13140 BITMAP_FREE (has_decl_expr);
13141 delete allocate_uids;
13142
13143 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
13144 || (loop_p && orig_for_stmt == for_stmt))
13145 {
13146 push_gimplify_context ();
13147 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
13148 {
13149 OMP_FOR_BODY (orig_for_stmt)
13150 = build3 (BIND_EXPR, void_type_node, NULL,
13151 OMP_FOR_BODY (orig_for_stmt), NULL);
13152 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
13153 }
13154 }
13155
13156 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
13157 &for_body);
13158
13159 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
13160 || (loop_p && orig_for_stmt == for_stmt))
13161 {
13162 if (gimple_code (g) == GIMPLE_BIND)
13163 pop_gimplify_context (g);
13164 else
13165 pop_gimplify_context (NULL);
13166 }
13167
13168 if (orig_for_stmt != for_stmt)
13169 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
13170 {
13171 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
13172 decl = TREE_OPERAND (t, 0);
13173 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
13174 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
13175 gimplify_omp_ctxp = ctx->outer_context;
13176 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
13177 gimplify_omp_ctxp = ctx;
13178 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
13179 TREE_OPERAND (t, 0) = var;
13180 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
13181 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
13182 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
13183 if (OMP_FOR_NON_RECTANGULAR (for_stmt))
13184 for (int j = i + 1;
13185 j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); j++)
13186 {
13187 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
13188 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
13189 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
13190 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
13191 {
13192 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
13193 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
13194 }
13195 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
13196 gcc_assert (COMPARISON_CLASS_P (t));
13197 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
13198 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
13199 {
13200 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
13201 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
13202 }
13203 }
13204 }
13205
13206 gimplify_adjust_omp_clauses (pre_p, for_body,
13207 &OMP_FOR_CLAUSES (orig_for_stmt),
13208 TREE_CODE (orig_for_stmt));
13209
13210 int kind;
13211 switch (TREE_CODE (orig_for_stmt))
13212 {
13213 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
13214 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
13215 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
13216 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
13217 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
13218 default:
13219 gcc_unreachable ();
13220 }
13221 if (loop_p && kind == GF_OMP_FOR_KIND_SIMD)
13222 {
13223 gimplify_seq_add_seq (pre_p, for_pre_body);
13224 for_pre_body = NULL;
13225 }
13226 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
13227 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
13228 for_pre_body);
13229 if (orig_for_stmt != for_stmt)
13230 gimple_omp_for_set_combined_p (gfor, true);
13231 if (gimplify_omp_ctxp
13232 && (gimplify_omp_ctxp->combined_loop
13233 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
13234 && gimplify_omp_ctxp->outer_context
13235 && gimplify_omp_ctxp->outer_context->combined_loop)))
13236 {
13237 gimple_omp_for_set_combined_into_p (gfor, true);
13238 if (gimplify_omp_ctxp->combined_loop)
13239 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
13240 else
13241 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
13242 }
13243
13244 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
13245 {
13246 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
13247 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
13248 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
13249 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
13250 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
13251 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
13252 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
13253 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
13254 }
13255
13256   /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
13257      constructs with a GIMPLE_OMP_TASK sandwiched in between them.
13258      The outer taskloop is responsible for computing the number of
13259      iterations and counts for collapsed loops, and holds the
13260      taskloop-specific clauses.  The task construct represents the data
13261      sharing on the explicit task it creates, and the inner taskloop
13262      expands the static loop inside of the explicit task construct.  */
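  /* Schematically (an illustrative sketch of the generated nesting):

	GIMPLE_OMP_FOR (taskloop)	 <- iteration counts, taskloop clauses
	  GIMPLE_OMP_TASK		 <- data sharing for the explicit task
	    GIMPLE_OMP_FOR (taskloop)	 <- expansion of the static loop  */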
13263 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
13264 {
13265 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
13266 tree task_clauses = NULL_TREE;
13267 tree c = *gfor_clauses_ptr;
13268 tree *gtask_clauses_ptr = &task_clauses;
13269 tree outer_for_clauses = NULL_TREE;
13270 tree *gforo_clauses_ptr = &outer_for_clauses;
13271 bitmap lastprivate_uids = NULL;
13272 if (omp_find_clause (c, OMP_CLAUSE_ALLOCATE))
13273 {
13274 c = omp_find_clause (c, OMP_CLAUSE_LASTPRIVATE);
13275 if (c)
13276 {
13277 lastprivate_uids = BITMAP_ALLOC (NULL);
13278 for (; c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
13279 OMP_CLAUSE_LASTPRIVATE))
13280 bitmap_set_bit (lastprivate_uids,
13281 DECL_UID (OMP_CLAUSE_DECL (c)));
13282 }
13283 c = *gfor_clauses_ptr;
13284 }
13285 for (; c; c = OMP_CLAUSE_CHAIN (c))
13286 switch (OMP_CLAUSE_CODE (c))
13287 {
13288 /* These clauses are allowed on task, move them there. */
13289 case OMP_CLAUSE_SHARED:
13290 case OMP_CLAUSE_FIRSTPRIVATE:
13291 case OMP_CLAUSE_DEFAULT:
13292 case OMP_CLAUSE_IF:
13293 case OMP_CLAUSE_UNTIED:
13294 case OMP_CLAUSE_FINAL:
13295 case OMP_CLAUSE_MERGEABLE:
13296 case OMP_CLAUSE_PRIORITY:
13297 case OMP_CLAUSE_REDUCTION:
13298 case OMP_CLAUSE_IN_REDUCTION:
13299 *gtask_clauses_ptr = c;
13300 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
13301 break;
13302 case OMP_CLAUSE_PRIVATE:
13303 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
13304 {
13305 /* We want private on outer for and firstprivate
13306 on task. */
13307 *gtask_clauses_ptr
13308 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
13309 OMP_CLAUSE_FIRSTPRIVATE);
13310 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
13311 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL,
13312 openacc);
13313 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
13314 *gforo_clauses_ptr = c;
13315 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
13316 }
13317 else
13318 {
13319 *gtask_clauses_ptr = c;
13320 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
13321 }
13322 break;
13323 /* These clauses go into outer taskloop clauses. */
13324 case OMP_CLAUSE_GRAINSIZE:
13325 case OMP_CLAUSE_NUM_TASKS:
13326 case OMP_CLAUSE_NOGROUP:
13327 *gforo_clauses_ptr = c;
13328 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
13329 break;
13330 /* Collapse clause we duplicate on both taskloops. */
13331 case OMP_CLAUSE_COLLAPSE:
13332 *gfor_clauses_ptr = c;
13333 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
13334 *gforo_clauses_ptr = copy_node (c);
13335 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
13336 break;
13337 	  /* For lastprivate, keep the clause on the inner taskloop, and add
13338 	     a shared clause on the task.  If the same decl is also
13339 	     firstprivate, also add a firstprivate clause on the inner taskloop.  */
13340 case OMP_CLAUSE_LASTPRIVATE:
13341 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
13342 {
13343 /* For taskloop C++ lastprivate IVs, we want:
13344 1) private on outer taskloop
13345 2) firstprivate and shared on task
13346 3) lastprivate on inner taskloop */
13347 *gtask_clauses_ptr
13348 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
13349 OMP_CLAUSE_FIRSTPRIVATE);
13350 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
13351 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL,
13352 openacc);
13353 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
13354 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
13355 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
13356 OMP_CLAUSE_PRIVATE);
13357 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
13358 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
13359 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
13360 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
13361 }
13362 *gfor_clauses_ptr = c;
13363 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
13364 *gtask_clauses_ptr
13365 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
13366 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
13367 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
13368 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
13369 gtask_clauses_ptr
13370 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
13371 break;
13372 /* Allocate clause we duplicate on task and inner taskloop
13373 if the decl is lastprivate, otherwise just put on task. */
13374 case OMP_CLAUSE_ALLOCATE:
13375 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
13376 && DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)))
13377 {
13378 /* Additionally, put firstprivate clause on task
13379 for the allocator if it is not constant. */
13380 *gtask_clauses_ptr
13381 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
13382 OMP_CLAUSE_FIRSTPRIVATE);
13383 OMP_CLAUSE_DECL (*gtask_clauses_ptr)
13384 = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
13385 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
13386 }
13387 if (lastprivate_uids
13388 && bitmap_bit_p (lastprivate_uids,
13389 DECL_UID (OMP_CLAUSE_DECL (c))))
13390 {
13391 *gfor_clauses_ptr = c;
13392 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
13393 *gtask_clauses_ptr = copy_node (c);
13394 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
13395 }
13396 else
13397 {
13398 *gtask_clauses_ptr = c;
13399 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
13400 }
13401 break;
13402 default:
13403 gcc_unreachable ();
13404 }
13405 *gfor_clauses_ptr = NULL_TREE;
13406 *gtask_clauses_ptr = NULL_TREE;
13407 *gforo_clauses_ptr = NULL_TREE;
13408 BITMAP_FREE (lastprivate_uids);
13409 gimple_set_location (gfor, input_location);
13410 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
13411 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
13412 NULL_TREE, NULL_TREE, NULL_TREE);
13413 gimple_set_location (g, input_location);
13414 gimple_omp_task_set_taskloop_p (g, true);
13415 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
13416 gomp_for *gforo
13417 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
13418 gimple_omp_for_collapse (gfor),
13419 gimple_omp_for_pre_body (gfor));
13420 gimple_omp_for_set_pre_body (gfor, NULL);
13421 gimple_omp_for_set_combined_p (gforo, true);
13422 gimple_omp_for_set_combined_into_p (gfor, true);
13423 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
13424 {
13425 tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
13426 tree v = create_tmp_var (type);
13427 gimple_omp_for_set_index (gforo, i, v);
13428 t = unshare_expr (gimple_omp_for_initial (gfor, i));
13429 gimple_omp_for_set_initial (gforo, i, t);
13430 gimple_omp_for_set_cond (gforo, i,
13431 gimple_omp_for_cond (gfor, i));
13432 t = unshare_expr (gimple_omp_for_final (gfor, i));
13433 gimple_omp_for_set_final (gforo, i, t);
13434 t = unshare_expr (gimple_omp_for_incr (gfor, i));
13435 gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
13436 TREE_OPERAND (t, 0) = v;
13437 gimple_omp_for_set_incr (gforo, i, t);
13438 t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
13439 OMP_CLAUSE_DECL (t) = v;
13440 OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
13441 gimple_omp_for_set_clauses (gforo, t);
13442 if (OMP_FOR_NON_RECTANGULAR (for_stmt))
13443 {
13444 tree *p1 = NULL, *p2 = NULL;
13445 t = gimple_omp_for_initial (gforo, i);
13446 if (TREE_CODE (t) == TREE_VEC)
13447 p1 = &TREE_VEC_ELT (t, 0);
13448 t = gimple_omp_for_final (gforo, i);
13449 if (TREE_CODE (t) == TREE_VEC)
13450 {
13451 if (p1)
13452 p2 = &TREE_VEC_ELT (t, 0);
13453 else
13454 p1 = &TREE_VEC_ELT (t, 0);
13455 }
13456 if (p1)
13457 {
13458 int j;
13459 for (j = 0; j < i; j++)
13460 if (*p1 == gimple_omp_for_index (gfor, j))
13461 {
13462 *p1 = gimple_omp_for_index (gforo, j);
13463 if (p2)
13464 *p2 = *p1;
13465 break;
13466 }
13467 gcc_assert (j < i);
13468 }
13469 }
13470 }
13471 gimplify_seq_add_stmt (pre_p, gforo);
13472 }
13473 else
13474 gimplify_seq_add_stmt (pre_p, gfor);
13475
13476 if (TREE_CODE (orig_for_stmt) == OMP_FOR)
13477 {
13478 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
13479 unsigned lastprivate_conditional = 0;
13480 while (ctx
13481 && (ctx->region_type == ORT_TARGET_DATA
13482 || ctx->region_type == ORT_TASKGROUP))
13483 ctx = ctx->outer_context;
13484 if (ctx && (ctx->region_type & ORT_PARALLEL) != 0)
13485 for (tree c = gimple_omp_for_clauses (gfor);
13486 c; c = OMP_CLAUSE_CHAIN (c))
13487 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
13488 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
13489 ++lastprivate_conditional;
13490 if (lastprivate_conditional)
13491 {
13492 struct omp_for_data fd;
13493 omp_extract_for_data (gfor, &fd, NULL);
13494 tree type = build_array_type_nelts (unsigned_type_for (fd.iter_type),
13495 lastprivate_conditional);
13496 tree var = create_tmp_var_raw (type);
13497 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
13498 OMP_CLAUSE_DECL (c) = var;
13499 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
13500 gimple_omp_for_set_clauses (gfor, c);
13501 omp_add_variable (ctx, var, GOVD_CONDTEMP | GOVD_SEEN);
13502 }
13503 }
13504 else if (TREE_CODE (orig_for_stmt) == OMP_SIMD)
13505 {
13506 unsigned lastprivate_conditional = 0;
13507 for (tree c = gimple_omp_for_clauses (gfor); c; c = OMP_CLAUSE_CHAIN (c))
13508 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
13509 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
13510 ++lastprivate_conditional;
13511 if (lastprivate_conditional)
13512 {
13513 struct omp_for_data fd;
13514 omp_extract_for_data (gfor, &fd, NULL);
13515 tree type = unsigned_type_for (fd.iter_type);
13516 while (lastprivate_conditional--)
13517 {
13518 tree c = build_omp_clause (UNKNOWN_LOCATION,
13519 OMP_CLAUSE__CONDTEMP_);
13520 OMP_CLAUSE_DECL (c) = create_tmp_var (type);
13521 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
13522 gimple_omp_for_set_clauses (gfor, c);
13523 }
13524 }
13525 }
13526
13527 if (ret != GS_ALL_DONE)
13528 return GS_ERROR;
13529 *expr_p = NULL_TREE;
13530 return GS_ALL_DONE;
13531 }
13532
13533 /* Helper for gimplify_omp_loop, called through walk_tree. */
13534
13535 static tree
13536 note_no_context_vars (tree *tp, int *, void *data)
13537 {
13538 if (VAR_P (*tp)
13539 && DECL_CONTEXT (*tp) == NULL_TREE
13540 && !is_global_var (*tp))
13541 {
13542 vec<tree> *d = (vec<tree> *) data;
13543 d->safe_push (*tp);
13544 DECL_CONTEXT (*tp) = current_function_decl;
13545 }
13546 return NULL_TREE;
13547 }
13548
13549 /* Gimplify the gross structure of an OMP_LOOP statement. */
13550
13551 static enum gimplify_status
13552 gimplify_omp_loop (tree *expr_p, gimple_seq *pre_p)
13553 {
13554 tree for_stmt = *expr_p;
13555 tree clauses = OMP_FOR_CLAUSES (for_stmt);
13556 struct gimplify_omp_ctx *octx = gimplify_omp_ctxp;
13557 enum omp_clause_bind_kind kind = OMP_CLAUSE_BIND_THREAD;
13558 int i;
13559
13560 /* If order is not present, the behavior is as if order(concurrent)
13561 appeared. */
13562 tree order = omp_find_clause (clauses, OMP_CLAUSE_ORDER);
13563 if (order == NULL_TREE)
13564 {
13565 order = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_ORDER);
13566 OMP_CLAUSE_CHAIN (order) = clauses;
13567 OMP_FOR_CLAUSES (for_stmt) = clauses = order;
13568 }
13569
13570 tree bind = omp_find_clause (clauses, OMP_CLAUSE_BIND);
13571 if (bind == NULL_TREE)
13572 {
13573 if (!flag_openmp) /* flag_openmp_simd */
13574 ;
13575 else if (octx && (octx->region_type & ORT_TEAMS) != 0)
13576 kind = OMP_CLAUSE_BIND_TEAMS;
13577 else if (octx && (octx->region_type & ORT_PARALLEL) != 0)
13578 kind = OMP_CLAUSE_BIND_PARALLEL;
13579 else
13580 {
13581 for (; octx; octx = octx->outer_context)
13582 {
13583 if ((octx->region_type & ORT_ACC) != 0
13584 || octx->region_type == ORT_NONE
13585 || octx->region_type == ORT_IMPLICIT_TARGET)
13586 continue;
13587 break;
13588 }
13589 if (octx == NULL && !in_omp_construct)
13590 error_at (EXPR_LOCATION (for_stmt),
13591 "%<bind%> clause not specified on a %<loop%> "
13592 "construct not nested inside another OpenMP construct");
13593 }
13594 bind = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_BIND);
13595 OMP_CLAUSE_CHAIN (bind) = clauses;
13596 OMP_CLAUSE_BIND_KIND (bind) = kind;
13597 OMP_FOR_CLAUSES (for_stmt) = bind;
13598 }
13599 else
13600 switch (OMP_CLAUSE_BIND_KIND (bind))
13601 {
13602 case OMP_CLAUSE_BIND_THREAD:
13603 break;
13604 case OMP_CLAUSE_BIND_PARALLEL:
13605 if (!flag_openmp) /* flag_openmp_simd */
13606 {
13607 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
13608 break;
13609 }
13610 for (; octx; octx = octx->outer_context)
13611 if (octx->region_type == ORT_SIMD
13612 && omp_find_clause (octx->clauses, OMP_CLAUSE_BIND) == NULL_TREE)
13613 {
13614 error_at (EXPR_LOCATION (for_stmt),
13615 "%<bind(parallel)%> on a %<loop%> construct nested "
13616 "inside %<simd%> construct");
13617 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
13618 break;
13619 }
13620 kind = OMP_CLAUSE_BIND_PARALLEL;
13621 break;
13622 case OMP_CLAUSE_BIND_TEAMS:
13623 if (!flag_openmp) /* flag_openmp_simd */
13624 {
13625 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
13626 break;
13627 }
13628 if ((octx
13629 && octx->region_type != ORT_IMPLICIT_TARGET
13630 && octx->region_type != ORT_NONE
13631 && (octx->region_type & ORT_TEAMS) == 0)
13632 || in_omp_construct)
13633 {
13634 error_at (EXPR_LOCATION (for_stmt),
13635 "%<bind(teams)%> on a %<loop%> region not strictly "
13636 "nested inside of a %<teams%> region");
13637 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
13638 break;
13639 }
13640 kind = OMP_CLAUSE_BIND_TEAMS;
13641 break;
13642 default:
13643 gcc_unreachable ();
13644 }
13645
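/* Diagnose and prune clauses that are invalid on a loop construct:
   inscan and task reduction modifiers are rejected, and lastprivate may
   only name one of the loop iterators.  */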
13646 for (tree *pc = &OMP_FOR_CLAUSES (for_stmt); *pc; )
13647 switch (OMP_CLAUSE_CODE (*pc))
13648 {
13649 case OMP_CLAUSE_REDUCTION:
13650 if (OMP_CLAUSE_REDUCTION_INSCAN (*pc))
13651 {
13652 error_at (OMP_CLAUSE_LOCATION (*pc),
13653 "%<inscan%> %<reduction%> clause on "
13654 "%qs construct", "loop");
13655 OMP_CLAUSE_REDUCTION_INSCAN (*pc) = 0;
13656 }
13657 if (OMP_CLAUSE_REDUCTION_TASK (*pc))
13658 {
13659 error_at (OMP_CLAUSE_LOCATION (*pc),
13660 "invalid %<task%> reduction modifier on construct "
13661 "other than %<parallel%>, %qs or %<sections%>",
13662 lang_GNU_Fortran () ? "do" : "for");
13663 OMP_CLAUSE_REDUCTION_TASK (*pc) = 0;
13664 }
13665 pc = &OMP_CLAUSE_CHAIN (*pc);
13666 break;
13667 case OMP_CLAUSE_LASTPRIVATE:
13668 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
13669 {
13670 tree t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
13671 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
13672 if (OMP_CLAUSE_DECL (*pc) == TREE_OPERAND (t, 0))
13673 break;
13674 if (OMP_FOR_ORIG_DECLS (for_stmt)
13675 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
13676 i)) == TREE_LIST
13677 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
13678 i)))
13679 {
13680 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
13681 if (OMP_CLAUSE_DECL (*pc) == TREE_PURPOSE (orig))
13682 break;
13683 }
13684 }
13685 if (i == TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)))
13686 {
13687 error_at (OMP_CLAUSE_LOCATION (*pc),
13688 "%<lastprivate%> clause on a %<loop%> construct refers "
13689 "to a variable %qD which is not the loop iterator",
13690 OMP_CLAUSE_DECL (*pc));
13691 *pc = OMP_CLAUSE_CHAIN (*pc);
13692 break;
13693 }
13694 pc = &OMP_CLAUSE_CHAIN (*pc);
13695 break;
13696 default:
13697 pc = &OMP_CLAUSE_CHAIN (*pc);
13698 break;
13699 }
13700
13701 TREE_SET_CODE (for_stmt, OMP_SIMD);
13702
13703 int last;
13704 switch (kind)
13705 {
13706 case OMP_CLAUSE_BIND_THREAD: last = 0; break;
13707 case OMP_CLAUSE_BIND_PARALLEL: last = 1; break;
13708 case OMP_CLAUSE_BIND_TEAMS: last = 2; break;
13709 }
13710 for (int pass = 1; pass <= last; pass++)
13711 {
13712 if (pass == 2)
13713 {
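/* Second pass, bind(teams) only: wrap what has been built so far in a
   combined parallel region, forwarding the original iteration variables
   as firstprivate.  */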
13714 tree bind = build3 (BIND_EXPR, void_type_node, NULL, NULL,
13715 make_node (BLOCK));
13716 append_to_statement_list (*expr_p, &BIND_EXPR_BODY (bind));
13717 *expr_p = make_node (OMP_PARALLEL);
13718 TREE_TYPE (*expr_p) = void_type_node;
13719 OMP_PARALLEL_BODY (*expr_p) = bind;
13720 OMP_PARALLEL_COMBINED (*expr_p) = 1;
13721 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (for_stmt));
13722 tree *pc = &OMP_PARALLEL_CLAUSES (*expr_p);
13723 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
13724 if (OMP_FOR_ORIG_DECLS (for_stmt)
13725 && (TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i))
13726 == TREE_LIST))
13727 {
13728 tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
13729 if (TREE_PURPOSE (elt) && TREE_VALUE (elt))
13730 {
13731 *pc = build_omp_clause (UNKNOWN_LOCATION,
13732 OMP_CLAUSE_FIRSTPRIVATE);
13733 OMP_CLAUSE_DECL (*pc) = TREE_VALUE (elt);
13734 pc = &OMP_CLAUSE_CHAIN (*pc);
13735 }
13736 }
13737 }
13738 tree t = make_node (pass == 2 ? OMP_DISTRIBUTE : OMP_FOR);
13739 tree *pc = &OMP_FOR_CLAUSES (t);
13740 TREE_TYPE (t) = void_type_node;
13741 OMP_FOR_BODY (t) = *expr_p;
13742 SET_EXPR_LOCATION (t, EXPR_LOCATION (for_stmt));
13743 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
13744 switch (OMP_CLAUSE_CODE (c))
13745 {
13746 case OMP_CLAUSE_BIND:
13747 case OMP_CLAUSE_ORDER:
13748 case OMP_CLAUSE_COLLAPSE:
13749 *pc = copy_node (c);
13750 pc = &OMP_CLAUSE_CHAIN (*pc);
13751 break;
13752 case OMP_CLAUSE_PRIVATE:
13753 case OMP_CLAUSE_FIRSTPRIVATE:
13754 /* Only needed on innermost. */
13755 break;
13756 case OMP_CLAUSE_LASTPRIVATE:
13757 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c) && pass != last)
13758 {
13759 *pc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
13760 OMP_CLAUSE_FIRSTPRIVATE);
13761 OMP_CLAUSE_DECL (*pc) = OMP_CLAUSE_DECL (c);
13762 lang_hooks.decls.omp_finish_clause (*pc, NULL, false);
13763 pc = &OMP_CLAUSE_CHAIN (*pc);
13764 }
13765 *pc = copy_node (c);
13766 OMP_CLAUSE_LASTPRIVATE_STMT (*pc) = NULL_TREE;
13767 TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
13768 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
13769 {
13770 if (pass != last)
13771 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (*pc) = 1;
13772 else
13773 lang_hooks.decls.omp_finish_clause (*pc, NULL, false);
13774 OMP_CLAUSE_LASTPRIVATE_LOOP_IV (*pc) = 0;
13775 }
13776 pc = &OMP_CLAUSE_CHAIN (*pc);
13777 break;
13778 case OMP_CLAUSE_REDUCTION:
13779 *pc = copy_node (c);
13780 OMP_CLAUSE_DECL (*pc) = unshare_expr (OMP_CLAUSE_DECL (c));
13781 TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
13782 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc))
13783 {
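/* The init and merge expressions refer to placeholder decls, so plain
   unshare_expr is not enough: copy the placeholders themselves and remap
   every reference through a proper tree-body copy.  */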
13784 auto_vec<tree> no_context_vars;
13785 int walk_subtrees = 0;
13786 note_no_context_vars (&OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
13787 &walk_subtrees, &no_context_vars);
13788 if (tree p = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c))
13789 note_no_context_vars (&p, &walk_subtrees, &no_context_vars);
13790 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_INIT (c),
13791 note_no_context_vars,
13792 &no_context_vars);
13793 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_MERGE (c),
13794 note_no_context_vars,
13795 &no_context_vars);
13796
13797 OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc)
13798 = copy_node (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c));
13799 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
13800 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc)
13801 = copy_node (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c));
13802
13803 hash_map<tree, tree> decl_map;
13804 decl_map.put (OMP_CLAUSE_DECL (c), OMP_CLAUSE_DECL (c));
13805 decl_map.put (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
13806 OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc));
13807 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
13808 decl_map.put (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
13809 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc));
13810
13811 copy_body_data id;
13812 memset (&id, 0, sizeof (id));
13813 id.src_fn = current_function_decl;
13814 id.dst_fn = current_function_decl;
13815 id.src_cfun = cfun;
13816 id.decl_map = &decl_map;
13817 id.copy_decl = copy_decl_no_change;
13818 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
13819 id.transform_new_cfg = true;
13820 id.transform_return_to_modify = false;
13821 id.eh_lp_nr = 0;
13822 walk_tree (&OMP_CLAUSE_REDUCTION_INIT (*pc), copy_tree_body_r,
13823 &id, NULL);
13824 walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (*pc), copy_tree_body_r,
13825 &id, NULL);
13826
13827 for (tree d : no_context_vars)
13828 {
13829 DECL_CONTEXT (d) = NULL_TREE;
13830 DECL_CONTEXT (*decl_map.get (d)) = NULL_TREE;
13831 }
13832 }
13833 else
13834 {
13835 OMP_CLAUSE_REDUCTION_INIT (*pc)
13836 = unshare_expr (OMP_CLAUSE_REDUCTION_INIT (c));
13837 OMP_CLAUSE_REDUCTION_MERGE (*pc)
13838 = unshare_expr (OMP_CLAUSE_REDUCTION_MERGE (c));
13839 }
13840 pc = &OMP_CLAUSE_CHAIN (*pc);
13841 break;
13842 default:
13843 gcc_unreachable ();
13844 }
13845 *pc = NULL_TREE;
13846 *expr_p = t;
13847 }
13848 return gimplify_expr (expr_p, pre_p, NULL, is_gimple_stmt, fb_none);
13849 }
13850
13851
13852 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
13853 of OMP_TARGET's body. */
13854
13855 static tree
13856 find_omp_teams (tree *tp, int *walk_subtrees, void *)
13857 {
13858 *walk_subtrees = 0;
13859 switch (TREE_CODE (*tp))
13860 {
13861 case OMP_TEAMS:
13862 return *tp;
13863 case BIND_EXPR:
13864 case STATEMENT_LIST:
13865 *walk_subtrees = 1;
13866 break;
13867 default:
13868 break;
13869 }
13870 return NULL_TREE;
13871 }
13872
13873 /* Helper function of optimize_target_teams, determine if the expression
13874 can be computed safely before the target construct on the host. */
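/* Returns NULL_TREE if *TP is safe to evaluate on the host, otherwise
   the offending tree, which stops the enclosing walk_tree.  */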
13875
13876 static tree
13877 computable_teams_clause (tree *tp, int *walk_subtrees, void *)
13878 {
13879 splay_tree_node n;
13880
13881 if (TYPE_P (*tp))
13882 {
13883 *walk_subtrees = 0;
13884 return NULL_TREE;
13885 }
13886 switch (TREE_CODE (*tp))
13887 {
13888 case VAR_DECL:
13889 case PARM_DECL:
13890 case RESULT_DECL:
13891 *walk_subtrees = 0;
13892 if (error_operand_p (*tp)
13893 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
13894 || DECL_HAS_VALUE_EXPR_P (*tp)
13895 || DECL_THREAD_LOCAL_P (*tp)
13896 || TREE_SIDE_EFFECTS (*tp)
13897 || TREE_THIS_VOLATILE (*tp))
13898 return *tp;
13899 if (is_global_var (*tp)
13900 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
13901 || lookup_attribute ("omp declare target link",
13902 DECL_ATTRIBUTES (*tp))))
13903 return *tp;
13904 if (VAR_P (*tp)
13905 && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
13906 && !is_global_var (*tp)
13907 && decl_function_context (*tp) == current_function_decl)
13908 return *tp;
13909 n = splay_tree_lookup (gimplify_omp_ctxp->variables,
13910 (splay_tree_key) *tp);
13911 if (n == NULL)
13912 {
13913 if (gimplify_omp_ctxp->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
13914 return NULL_TREE;
13915 return *tp;
13916 }
13917 else if (n->value & GOVD_LOCAL)
13918 return *tp;
13919 else if (n->value & GOVD_FIRSTPRIVATE)
13920 return NULL_TREE;
13921 else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
13922 == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
13923 return NULL_TREE;
13924 return *tp;
13925 case INTEGER_CST:
13926 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
13927 return *tp;
13928 return NULL_TREE;
13929 case TARGET_EXPR:
13930 if (TARGET_EXPR_INITIAL (*tp)
13931 || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
13932 return *tp;
13933 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
13934 walk_subtrees, NULL);
13935 /* Allow some reasonable subset of integral arithmetic. */
13936 case PLUS_EXPR:
13937 case MINUS_EXPR:
13938 case MULT_EXPR:
13939 case TRUNC_DIV_EXPR:
13940 case CEIL_DIV_EXPR:
13941 case FLOOR_DIV_EXPR:
13942 case ROUND_DIV_EXPR:
13943 case TRUNC_MOD_EXPR:
13944 case CEIL_MOD_EXPR:
13945 case FLOOR_MOD_EXPR:
13946 case ROUND_MOD_EXPR:
13947 case RDIV_EXPR:
13948 case EXACT_DIV_EXPR:
13949 case MIN_EXPR:
13950 case MAX_EXPR:
13951 case LSHIFT_EXPR:
13952 case RSHIFT_EXPR:
13953 case BIT_IOR_EXPR:
13954 case BIT_XOR_EXPR:
13955 case BIT_AND_EXPR:
13956 case NEGATE_EXPR:
13957 case ABS_EXPR:
13958 case BIT_NOT_EXPR:
13959 case NON_LVALUE_EXPR:
13960 CASE_CONVERT:
13961 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
13962 return *tp;
13963 return NULL_TREE;
13964 /* And disallow anything else, except for comparisons. */
13965 default:
13966 if (COMPARISON_CLASS_P (*tp))
13967 return NULL_TREE;
13968 return *tp;
13969 }
13970 }
13971
13972 /* Try to determine whether the num_teams and/or thread_limit expressions
13973 can have their values determined already before entering the
13974 target construct.
13975 INTEGER_CSTs trivially can; so can integral decls that are
13976 firstprivate (explicitly or implicitly) or explicitly
13977 map(always, to:) or map(always, tofrom:) on the target region,
13978 as well as expressions involving simple arithmetic on those.
13979 Function calls are not ok, nor is dereferencing something, etc.
13980 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
13981 EXPR based on what we find:
13982 0 stands for clause not specified at all, use implementation default
13983 -1 stands for value that can't be determined easily before entering
13984 the target construct.
13985 If the teams construct is not present at all, use 1 for num_teams
13986 and 0 for thread_limit (only one team is involved, and the thread
13987 limit is implementation defined). */
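/* A small illustration (the names here are hypothetical): given
     #pragma omp target
     #pragma omp teams num_teams(4) thread_limit(2 * n)
   num_teams is recorded as 4, and 2 * n is evaluated on the host provided
   n is firstprivate on the target, whereas thread_limit(foo ()) would be
   recorded as -1.  */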
13988
13989 static void
13990 optimize_target_teams (tree target, gimple_seq *pre_p)
13991 {
13992 tree body = OMP_BODY (target);
13993 tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
13994 tree num_teams_lower = NULL_TREE;
13995 tree num_teams_upper = integer_zero_node;
13996 tree thread_limit = integer_zero_node;
13997 location_t num_teams_loc = EXPR_LOCATION (target);
13998 location_t thread_limit_loc = EXPR_LOCATION (target);
13999 tree c, *p, expr;
14000 struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;
14001
14002 if (teams == NULL_TREE)
14003 num_teams_upper = integer_one_node;
14004 else
14005 for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
14006 {
14007 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
14008 {
14009 p = &num_teams_upper;
14010 num_teams_loc = OMP_CLAUSE_LOCATION (c);
14011 if (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c))
14012 {
14013 expr = OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c);
14014 if (TREE_CODE (expr) == INTEGER_CST)
14015 num_teams_lower = expr;
14016 else if (walk_tree (&expr, computable_teams_clause,
14017 NULL, NULL))
14018 num_teams_lower = integer_minus_one_node;
14019 else
14020 {
14021 num_teams_lower = expr;
14022 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
14023 if (gimplify_expr (&num_teams_lower, pre_p, NULL,
14024 is_gimple_val, fb_rvalue, false)
14025 == GS_ERROR)
14026 {
14027 gimplify_omp_ctxp = target_ctx;
14028 num_teams_lower = integer_minus_one_node;
14029 }
14030 else
14031 {
14032 gimplify_omp_ctxp = target_ctx;
14033 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
14034 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
14035 = num_teams_lower;
14036 }
14037 }
14038 }
14039 }
14040 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
14041 {
14042 p = &thread_limit;
14043 thread_limit_loc = OMP_CLAUSE_LOCATION (c);
14044 }
14045 else
14046 continue;
14047 expr = OMP_CLAUSE_OPERAND (c, 0);
14048 if (TREE_CODE (expr) == INTEGER_CST)
14049 {
14050 *p = expr;
14051 continue;
14052 }
14053 if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
14054 {
14055 *p = integer_minus_one_node;
14056 continue;
14057 }
14058 *p = expr;
14059 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
14060 if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
14061 == GS_ERROR)
14062 {
14063 gimplify_omp_ctxp = target_ctx;
14064 *p = integer_minus_one_node;
14065 continue;
14066 }
14067 gimplify_omp_ctxp = target_ctx;
14068 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
14069 OMP_CLAUSE_OPERAND (c, 0) = *p;
14070 }
14071 if (!omp_find_clause (OMP_TARGET_CLAUSES (target), OMP_CLAUSE_THREAD_LIMIT))
14072 {
14073 c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
14074 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
14075 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
14076 OMP_TARGET_CLAUSES (target) = c;
14077 }
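/* thread_limit is only added above when not already present, but a
   num_teams clause recording the computed lower and upper bounds is
   added unconditionally.  */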
14078 c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
14079 OMP_CLAUSE_NUM_TEAMS_UPPER_EXPR (c) = num_teams_upper;
14080 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c) = num_teams_lower;
14081 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
14082 OMP_TARGET_CLAUSES (target) = c;
14083 }
14084
14085 /* Gimplify the gross structure of several OMP constructs. */
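/* Covers OMP_SECTIONS, OMP_SINGLE, OMP_SCOPE, OMP_TARGET, OMP_TARGET_DATA
   and OMP_TEAMS as well as the OpenACC data and compute constructs: scan
   the clauses, gimplify the body, adjust the clauses, then emit the
   corresponding GIMPLE_OMP_* statement.  */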
14086
14087 static void
14088 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
14089 {
14090 tree expr = *expr_p;
14091 gimple *stmt;
14092 gimple_seq body = NULL;
14093 enum omp_region_type ort;
14094
14095 switch (TREE_CODE (expr))
14096 {
14097 case OMP_SECTIONS:
14098 case OMP_SINGLE:
14099 ort = ORT_WORKSHARE;
14100 break;
14101 case OMP_SCOPE:
14102 ort = ORT_TASKGROUP;
14103 break;
14104 case OMP_TARGET:
14105 ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
14106 break;
14107 case OACC_KERNELS:
14108 ort = ORT_ACC_KERNELS;
14109 break;
14110 case OACC_PARALLEL:
14111 ort = ORT_ACC_PARALLEL;
14112 break;
14113 case OACC_SERIAL:
14114 ort = ORT_ACC_SERIAL;
14115 break;
14116 case OACC_DATA:
14117 ort = ORT_ACC_DATA;
14118 break;
14119 case OMP_TARGET_DATA:
14120 ort = ORT_TARGET_DATA;
14121 break;
14122 case OMP_TEAMS:
14123 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
14124 if (gimplify_omp_ctxp == NULL
14125 || gimplify_omp_ctxp->region_type == ORT_IMPLICIT_TARGET)
14126 ort = (enum omp_region_type) (ort | ORT_HOST_TEAMS);
14127 break;
14128 case OACC_HOST_DATA:
14129 ort = ORT_ACC_HOST_DATA;
14130 break;
14131 default:
14132 gcc_unreachable ();
14133 }
14134
14135 bool save_in_omp_construct = in_omp_construct;
14136 if ((ort & ORT_ACC) == 0)
14137 in_omp_construct = false;
14138 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
14139 TREE_CODE (expr));
14140 if (TREE_CODE (expr) == OMP_TARGET)
14141 optimize_target_teams (expr, pre_p);
14142 if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0
14143 || (ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
14144 {
14145 push_gimplify_context ();
14146 gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
14147 if (gimple_code (g) == GIMPLE_BIND)
14148 pop_gimplify_context (g);
14149 else
14150 pop_gimplify_context (NULL);
14151 if ((ort & ORT_TARGET_DATA) != 0)
14152 {
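/* For data regions, wrap the body in a try/finally whose cleanup calls
   the matching *_END builtin, so the region is closed however the body
   is left.  */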
14153 enum built_in_function end_ix;
14154 switch (TREE_CODE (expr))
14155 {
14156 case OACC_DATA:
14157 case OACC_HOST_DATA:
14158 end_ix = BUILT_IN_GOACC_DATA_END;
14159 break;
14160 case OMP_TARGET_DATA:
14161 end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
14162 break;
14163 default:
14164 gcc_unreachable ();
14165 }
14166 tree fn = builtin_decl_explicit (end_ix);
14167 g = gimple_build_call (fn, 0);
14168 gimple_seq cleanup = NULL;
14169 gimple_seq_add_stmt (&cleanup, g);
14170 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
14171 body = NULL;
14172 gimple_seq_add_stmt (&body, g);
14173 }
14174 }
14175 else
14176 gimplify_and_add (OMP_BODY (expr), &body);
14177 gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
14178 TREE_CODE (expr));
14179 in_omp_construct = save_in_omp_construct;
14180
14181 switch (TREE_CODE (expr))
14182 {
14183 case OACC_DATA:
14184 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
14185 OMP_CLAUSES (expr));
14186 break;
14187 case OACC_HOST_DATA:
14188 if (omp_find_clause (OMP_CLAUSES (expr), OMP_CLAUSE_IF_PRESENT))
14189 {
14190 for (tree c = OMP_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
14191 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
14192 OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c) = 1;
14193 }
14194
14195 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
14196 OMP_CLAUSES (expr));
14197 break;
14198 case OACC_KERNELS:
14199 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
14200 OMP_CLAUSES (expr));
14201 break;
14202 case OACC_PARALLEL:
14203 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
14204 OMP_CLAUSES (expr));
14205 break;
14206 case OACC_SERIAL:
14207 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_SERIAL,
14208 OMP_CLAUSES (expr));
14209 break;
14210 case OMP_SECTIONS:
14211 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
14212 break;
14213 case OMP_SINGLE:
14214 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
14215 break;
14216 case OMP_SCOPE:
14217 stmt = gimple_build_omp_scope (body, OMP_CLAUSES (expr));
14218 break;
14219 case OMP_TARGET:
14220 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
14221 OMP_CLAUSES (expr));
14222 break;
14223 case OMP_TARGET_DATA:
14224 /* Put use_device_{ptr,addr} clauses last, as map clauses are supposed
14225 to be evaluated before the use_device_{ptr,addr} clauses if they
14226 refer to the same variables. */
14227 {
14228 tree use_device_clauses;
14229 tree *pc, *uc = &use_device_clauses;
14230 for (pc = &OMP_CLAUSES (expr); *pc; )
14231 if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_PTR
14232 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_ADDR)
14233 {
14234 *uc = *pc;
14235 *pc = OMP_CLAUSE_CHAIN (*pc);
14236 uc = &OMP_CLAUSE_CHAIN (*uc);
14237 }
14238 else
14239 pc = &OMP_CLAUSE_CHAIN (*pc);
14240 *uc = NULL_TREE;
14241 *pc = use_device_clauses;
14242 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
14243 OMP_CLAUSES (expr));
14244 }
14245 break;
14246 case OMP_TEAMS:
14247 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
14248 if ((ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
14249 gimple_omp_teams_set_host (as_a <gomp_teams *> (stmt), true);
14250 break;
14251 default:
14252 gcc_unreachable ();
14253 }
14254
14255 gimplify_seq_add_stmt (pre_p, stmt);
14256 *expr_p = NULL_TREE;
14257 }
14258
14259 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
14260 target update constructs. */
14261
14262 static void
14263 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
14264 {
14265 tree expr = *expr_p;
14266 int kind;
14267 gomp_target *stmt;
14268 enum omp_region_type ort = ORT_WORKSHARE;
14269
14270 switch (TREE_CODE (expr))
14271 {
14272 case OACC_ENTER_DATA:
14273 kind = GF_OMP_TARGET_KIND_OACC_ENTER_DATA;
14274 ort = ORT_ACC;
14275 break;
14276 case OACC_EXIT_DATA:
14277 kind = GF_OMP_TARGET_KIND_OACC_EXIT_DATA;
14278 ort = ORT_ACC;
14279 break;
14280 case OACC_UPDATE:
14281 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
14282 ort = ORT_ACC;
14283 break;
14284 case OMP_TARGET_UPDATE:
14285 kind = GF_OMP_TARGET_KIND_UPDATE;
14286 break;
14287 case OMP_TARGET_ENTER_DATA:
14288 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
14289 break;
14290 case OMP_TARGET_EXIT_DATA:
14291 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
14292 break;
14293 default:
14294 gcc_unreachable ();
14295 }
14296 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
14297 ort, TREE_CODE (expr));
14298 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
14299 TREE_CODE (expr));
14300 if (TREE_CODE (expr) == OACC_UPDATE
14301 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
14302 OMP_CLAUSE_IF_PRESENT))
14303 {
14304 /* The runtime uses GOMP_MAP_{TO,FROM} to denote the if_present
14305 clause. */
14306 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
14307 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
14308 switch (OMP_CLAUSE_MAP_KIND (c))
14309 {
14310 case GOMP_MAP_FORCE_TO:
14311 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
14312 break;
14313 case GOMP_MAP_FORCE_FROM:
14314 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FROM);
14315 break;
14316 default:
14317 break;
14318 }
14319 }
14320 else if (TREE_CODE (expr) == OACC_EXIT_DATA
14321 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
14322 OMP_CLAUSE_FINALIZE))
14323 {
14324 /* Use GOMP_MAP_DELETE/GOMP_MAP_FORCE_FROM to denote "finalize"
14325 semantics. */
14326 bool have_clause = false;
14327 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
14328 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
14329 switch (OMP_CLAUSE_MAP_KIND (c))
14330 {
14331 case GOMP_MAP_FROM:
14332 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_FROM);
14333 have_clause = true;
14334 break;
14335 case GOMP_MAP_RELEASE:
14336 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_DELETE);
14337 have_clause = true;
14338 break;
14339 case GOMP_MAP_TO_PSET:
14340 /* Fortran arrays with descriptors must map that descriptor when
14341 doing standalone "attach" operations (in OpenACC). In that
14342 case GOMP_MAP_TO_PSET appears by itself with no preceding
14343 clause (see trans-openmp.cc:gfc_trans_omp_clauses). */
14344 break;
14345 case GOMP_MAP_POINTER:
14346 /* TODO PR92929: we may see these here, but they'll always follow
14347 one of the clauses above, and will be handled by libgomp as
14348 one group, so no handling required here. */
14349 gcc_assert (have_clause);
14350 break;
14351 case GOMP_MAP_DETACH:
14352 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_DETACH);
14353 have_clause = false;
14354 break;
14355 case GOMP_MAP_STRUCT:
14356 have_clause = false;
14357 break;
14358 default:
14359 gcc_unreachable ();
14360 }
14361 }
14362 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
14363
14364 gimplify_seq_add_stmt (pre_p, stmt);
14365 *expr_p = NULL_TREE;
14366 }
14367
14368 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
14369 stabilized the lhs of the atomic operation as *ADDR. Return true if
14370 EXPR is this stabilized form. */
14371
14372 static bool
14373 goa_lhs_expr_p (tree expr, tree addr)
14374 {
14375 /* Also include casts to other type variants. The C front end is fond
14376 of adding these for e.g. volatile variables. This is like
14377 STRIP_TYPE_NOPS but includes the main variant lookup. */
14378 STRIP_USELESS_TYPE_CONVERSION (expr);
14379
14380 if (TREE_CODE (expr) == INDIRECT_REF)
14381 {
14382 expr = TREE_OPERAND (expr, 0);
14383 while (expr != addr
14384 && (CONVERT_EXPR_P (expr)
14385 || TREE_CODE (expr) == NON_LVALUE_EXPR)
14386 && TREE_CODE (expr) == TREE_CODE (addr)
14387 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
14388 {
14389 expr = TREE_OPERAND (expr, 0);
14390 addr = TREE_OPERAND (addr, 0);
14391 }
14392 if (expr == addr)
14393 return true;
14394 return (TREE_CODE (addr) == ADDR_EXPR
14395 && TREE_CODE (expr) == ADDR_EXPR
14396 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
14397 }
14398 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
14399 return true;
14400 return false;
14401 }
14402
14403 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
14404 expression does not involve the lhs, evaluate it into a temporary.
14405 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
14406 or -1 if an error was encountered. */
14407
14408 static int
14409 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
14410 tree lhs_var, tree &target_expr, bool rhs, int depth)
14411 {
14412 tree expr = *expr_p;
14413 int saw_lhs = 0;
14414
14415 if (goa_lhs_expr_p (expr, lhs_addr))
14416 {
14417 if (pre_p)
14418 *expr_p = lhs_var;
14419 return 1;
14420 }
14421 if (is_gimple_val (expr))
14422 return 0;
14423
14424 /* Maximum depth of lhs in expression is for the
14425 __builtin_clear_padding (...), __builtin_clear_padding (...),
14426 __builtin_memcmp (&TARGET_EXPR <lhs, >, ...) == 0 ? ... : lhs; */
14427 if (++depth > 7)
14428 goto finish;
14429
14430 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
14431 {
14432 case tcc_binary:
14433 case tcc_comparison:
14434 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
14435 lhs_var, target_expr, true, depth);
14436 /* FALLTHRU */
14437 case tcc_unary:
14438 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
14439 lhs_var, target_expr, true, depth);
14440 break;
14441 case tcc_expression:
14442 switch (TREE_CODE (expr))
14443 {
14444 case TRUTH_ANDIF_EXPR:
14445 case TRUTH_ORIF_EXPR:
14446 case TRUTH_AND_EXPR:
14447 case TRUTH_OR_EXPR:
14448 case TRUTH_XOR_EXPR:
14449 case BIT_INSERT_EXPR:
14450 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
14451 lhs_addr, lhs_var, target_expr, true,
14452 depth);
14453 /* FALLTHRU */
14454 case TRUTH_NOT_EXPR:
14455 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
14456 lhs_addr, lhs_var, target_expr, true,
14457 depth);
14458 break;
14459 case MODIFY_EXPR:
14460 if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr, lhs_var,
14461 target_expr, true, depth))
14462 break;
14463 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
14464 lhs_addr, lhs_var, target_expr, true,
14465 depth);
14466 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
14467 lhs_addr, lhs_var, target_expr, false,
14468 depth);
14469 break;
14470
14471 case ADDR_EXPR:
14472 if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr, lhs_var,
14473 target_expr, true, depth))
14474 break;
14475 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
14476 lhs_addr, lhs_var, target_expr, false,
14477 depth);
14478 break;
14479 case COMPOUND_EXPR:
14480 /* Break out any preevaluations from cp_build_modify_expr. */
14481 for (; TREE_CODE (expr) == COMPOUND_EXPR;
14482 expr = TREE_OPERAND (expr, 1))
14483 {
14484 /* Special-case __builtin_clear_padding call before
14485 __builtin_memcmp. */
14486 if (TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR)
14487 {
14488 tree fndecl = get_callee_fndecl (TREE_OPERAND (expr, 0));
14489 if (fndecl
14490 && fndecl_built_in_p (fndecl, BUILT_IN_CLEAR_PADDING)
14491 && VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
14492 && (!pre_p
14493 || goa_stabilize_expr (&TREE_OPERAND (expr, 0), NULL,
14494 lhs_addr, lhs_var,
14495 target_expr, true, depth)))
14496 {
14497 if (pre_p)
14498 *expr_p = expr;
14499 saw_lhs = goa_stabilize_expr (&TREE_OPERAND (expr, 0),
14500 pre_p, lhs_addr, lhs_var,
14501 target_expr, true, depth);
14502 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1),
14503 pre_p, lhs_addr, lhs_var,
14504 target_expr, rhs, depth);
14505 return saw_lhs;
14506 }
14507 }
14508
14509 if (pre_p)
14510 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
14511 }
14512 if (!pre_p)
14513 return goa_stabilize_expr (&expr, pre_p, lhs_addr, lhs_var,
14514 target_expr, rhs, depth);
14515 *expr_p = expr;
14516 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var,
14517 target_expr, rhs, depth);
14518 case COND_EXPR:
14519 if (!goa_stabilize_expr (&TREE_OPERAND (expr, 0), NULL, lhs_addr,
14520 lhs_var, target_expr, true, depth))
14521 break;
14522 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
14523 lhs_addr, lhs_var, target_expr, true,
14524 depth);
14525 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
14526 lhs_addr, lhs_var, target_expr, true,
14527 depth);
14528 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 2), pre_p,
14529 lhs_addr, lhs_var, target_expr, true,
14530 depth);
14531 break;
14532 case TARGET_EXPR:
14533 if (TARGET_EXPR_INITIAL (expr))
14534 {
14535 if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr,
14536 lhs_var, target_expr, true,
14537 depth))
14538 break;
14539 if (expr == target_expr)
14540 saw_lhs = 1;
14541 else
14542 {
14543 saw_lhs = goa_stabilize_expr (&TARGET_EXPR_INITIAL (expr),
14544 pre_p, lhs_addr, lhs_var,
14545 target_expr, true, depth);
14546 if (saw_lhs && target_expr == NULL_TREE && pre_p)
14547 target_expr = expr;
14548 }
14549 }
14550 break;
14551 default:
14552 break;
14553 }
14554 break;
14555 case tcc_reference:
14556 if (TREE_CODE (expr) == BIT_FIELD_REF
14557 || TREE_CODE (expr) == VIEW_CONVERT_EXPR)
14558 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
14559 lhs_addr, lhs_var, target_expr, true,
14560 depth);
14561 break;
14562 case tcc_vl_exp:
14563 if (TREE_CODE (expr) == CALL_EXPR)
14564 {
14565 if (tree fndecl = get_callee_fndecl (expr))
14566 if (fndecl_built_in_p (fndecl, BUILT_IN_CLEAR_PADDING)
14567 || fndecl_built_in_p (fndecl, BUILT_IN_MEMCMP))
14568 {
14569 int nargs = call_expr_nargs (expr);
14570 for (int i = 0; i < nargs; i++)
14571 saw_lhs |= goa_stabilize_expr (&CALL_EXPR_ARG (expr, i),
14572 pre_p, lhs_addr, lhs_var,
14573 target_expr, true, depth);
14574 }
14575 }
14576 break;
14577 default:
14578 break;
14579 }
14580
14581 finish:
14582 if (saw_lhs == 0 && pre_p)
14583 {
14584 enum gimplify_status gs;
14585 if (TREE_CODE (expr) == CALL_EXPR && VOID_TYPE_P (TREE_TYPE (expr)))
14586 {
14587 gimplify_stmt (&expr, pre_p);
14588 return saw_lhs;
14589 }
14590 else if (rhs)
14591 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
14592 else
14593 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_lvalue, fb_lvalue);
14594 if (gs != GS_ALL_DONE)
14595 saw_lhs = -1;
14596 }
14597
14598 return saw_lhs;
14599 }
14600
14601 /* Gimplify an OMP_ATOMIC statement. */
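/* The statement is lowered to a GIMPLE_OMP_ATOMIC_LOAD of *ADDR into a
   temporary register, gimplification of the rhs in terms of that
   temporary, and a GIMPLE_OMP_ATOMIC_STORE of the result; roughly, for
     #pragma omp atomic
     x += n;
   this yields
     tmp = OMP_ATOMIC_LOAD (&x);
     OMP_ATOMIC_STORE (tmp + n);  */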
14602
14603 static enum gimplify_status
14604 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
14605 {
14606 tree addr = TREE_OPERAND (*expr_p, 0);
14607 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
14608 ? NULL : TREE_OPERAND (*expr_p, 1);
14609 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
14610 tree tmp_load;
14611 gomp_atomic_load *loadstmt;
14612 gomp_atomic_store *storestmt;
14613 tree target_expr = NULL_TREE;
14614
14615 tmp_load = create_tmp_reg (type);
14616 if (rhs
14617 && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load, target_expr,
14618 true, 0) < 0)
14619 return GS_ERROR;
14620
14621 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
14622 != GS_ALL_DONE)
14623 return GS_ERROR;
14624
14625 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr,
14626 OMP_ATOMIC_MEMORY_ORDER (*expr_p));
14627 gimplify_seq_add_stmt (pre_p, loadstmt);
14628 if (rhs)
14629 {
14630 /* BIT_INSERT_EXPR is not valid for non-integral bitfield
14631 representatives. Use BIT_FIELD_REF on the lhs instead. */
14632 tree rhsarg = rhs;
14633 if (TREE_CODE (rhs) == COND_EXPR)
14634 rhsarg = TREE_OPERAND (rhs, 1);
14635 if (TREE_CODE (rhsarg) == BIT_INSERT_EXPR
14636 && !INTEGRAL_TYPE_P (TREE_TYPE (tmp_load)))
14637 {
14638 tree bitpos = TREE_OPERAND (rhsarg, 2);
14639 tree op1 = TREE_OPERAND (rhsarg, 1);
14640 tree bitsize;
14641 tree tmp_store = tmp_load;
14642 if (TREE_CODE (*expr_p) == OMP_ATOMIC_CAPTURE_OLD)
14643 tmp_store = get_initialized_tmp_var (tmp_load, pre_p);
14644 if (INTEGRAL_TYPE_P (TREE_TYPE (op1)))
14645 bitsize = bitsize_int (TYPE_PRECISION (TREE_TYPE (op1)));
14646 else
14647 bitsize = TYPE_SIZE (TREE_TYPE (op1));
14648 gcc_assert (TREE_OPERAND (rhsarg, 0) == tmp_load);
14649 tree t = build2_loc (EXPR_LOCATION (rhsarg),
14650 MODIFY_EXPR, void_type_node,
14651 build3_loc (EXPR_LOCATION (rhsarg),
14652 BIT_FIELD_REF, TREE_TYPE (op1),
14653 tmp_store, bitsize, bitpos), op1);
14654 if (TREE_CODE (rhs) == COND_EXPR)
14655 t = build3_loc (EXPR_LOCATION (rhs), COND_EXPR, void_type_node,
14656 TREE_OPERAND (rhs, 0), t, void_node);
14657 gimplify_and_add (t, pre_p);
14658 rhs = tmp_store;
14659 }
14660 bool save_allow_rhs_cond_expr = gimplify_ctxp->allow_rhs_cond_expr;
14661 if (TREE_CODE (rhs) == COND_EXPR)
14662 gimplify_ctxp->allow_rhs_cond_expr = true;
14663 enum gimplify_status gs = gimplify_expr (&rhs, pre_p, NULL,
14664 is_gimple_val, fb_rvalue);
14665 gimplify_ctxp->allow_rhs_cond_expr = save_allow_rhs_cond_expr;
14666 if (gs != GS_ALL_DONE)
14667 return GS_ERROR;
14668 }
14669
14670 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
14671 rhs = tmp_load;
14672 storestmt
14673 = gimple_build_omp_atomic_store (rhs, OMP_ATOMIC_MEMORY_ORDER (*expr_p));
14674 if (TREE_CODE (*expr_p) != OMP_ATOMIC_READ && OMP_ATOMIC_WEAK (*expr_p))
14675 {
14676 gimple_omp_atomic_set_weak (loadstmt);
14677 gimple_omp_atomic_set_weak (storestmt);
14678 }
14679 gimplify_seq_add_stmt (pre_p, storestmt);
14680 switch (TREE_CODE (*expr_p))
14681 {
14682 case OMP_ATOMIC_READ:
14683 case OMP_ATOMIC_CAPTURE_OLD:
14684 *expr_p = tmp_load;
14685 gimple_omp_atomic_set_need_value (loadstmt);
14686 break;
14687 case OMP_ATOMIC_CAPTURE_NEW:
14688 *expr_p = rhs;
14689 gimple_omp_atomic_set_need_value (storestmt);
14690 break;
14691 default:
14692 *expr_p = NULL;
14693 break;
14694 }
14695
14696 return GS_ALL_DONE;
14697 }
14698
14699 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
14700 body, and adding some EH bits. */
14701
14702 static enum gimplify_status
14703 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
14704 {
14705 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
14706 gimple *body_stmt;
14707 gtransaction *trans_stmt;
14708 gimple_seq body = NULL;
14709 int subcode = 0;
14710
14711 /* Wrap the transaction body in a BIND_EXPR so we have a context
14712 where to put decls for OMP. */
14713 if (TREE_CODE (tbody) != BIND_EXPR)
14714 {
14715 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
14716 TREE_SIDE_EFFECTS (bind) = 1;
14717 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
14718 TRANSACTION_EXPR_BODY (expr) = bind;
14719 }
14720
14721 push_gimplify_context ();
14722 temp = voidify_wrapper_expr (*expr_p, NULL);
14723
14724 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
14725 pop_gimplify_context (body_stmt);
14726
14727 trans_stmt = gimple_build_transaction (body);
14728 if (TRANSACTION_EXPR_OUTER (expr))
14729 subcode = GTMA_IS_OUTER;
14730 else if (TRANSACTION_EXPR_RELAXED (expr))
14731 subcode = GTMA_IS_RELAXED;
14732 gimple_transaction_set_subcode (trans_stmt, subcode);
14733
14734 gimplify_seq_add_stmt (pre_p, trans_stmt);
14735
14736 if (temp)
14737 {
14738 *expr_p = temp;
14739 return GS_OK;
14740 }
14741
14742 *expr_p = NULL_TREE;
14743 return GS_ALL_DONE;
14744 }
14745
14746 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
14747 is the OMP_BODY of the original EXPR (which has already been
14748 gimplified so it's not present in the EXPR).
14749
14750 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
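/* Each depend(sink:) variable must match, in order, an iteration
   variable of the enclosing doacross loop nest; matching entries are
   rewritten to their internal counterparts, and on any failure the
   whole construct degrades to a GIMPLE_NOP.  */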
14751
14752 static gimple *
14753 gimplify_omp_ordered (tree expr, gimple_seq body)
14754 {
14755 tree c, decls;
14756 int failures = 0;
14757 unsigned int i;
14758 tree source_c = NULL_TREE;
14759 tree sink_c = NULL_TREE;
14760
14761 if (gimplify_omp_ctxp)
14762 {
14763 for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
14764 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
14765 && gimplify_omp_ctxp->loop_iter_var.is_empty ()
14766 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
14767 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE))
14768 {
14769 error_at (OMP_CLAUSE_LOCATION (c),
14770 "%<ordered%> construct with %<depend%> clause must be "
14771 "closely nested inside a loop with %<ordered%> clause "
14772 "with a parameter");
14773 failures++;
14774 }
14775 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
14776 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
14777 {
14778 bool fail = false;
14779 for (decls = OMP_CLAUSE_DECL (c), i = 0;
14780 decls && TREE_CODE (decls) == TREE_LIST;
14781 decls = TREE_CHAIN (decls), ++i)
14782 if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
14783 continue;
14784 else if (TREE_VALUE (decls)
14785 != gimplify_omp_ctxp->loop_iter_var[2 * i])
14786 {
14787 error_at (OMP_CLAUSE_LOCATION (c),
14788 "variable %qE is not an iteration "
14789 "of outermost loop %d, expected %qE",
14790 TREE_VALUE (decls), i + 1,
14791 gimplify_omp_ctxp->loop_iter_var[2 * i]);
14792 fail = true;
14793 failures++;
14794 }
14795 else
14796 TREE_VALUE (decls)
14797 = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
14798 if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
14799 {
14800 error_at (OMP_CLAUSE_LOCATION (c),
14801 "number of variables in %<depend%> clause with "
14802 "%<sink%> modifier does not match number of "
14803 "iteration variables");
14804 failures++;
14805 }
14806 sink_c = c;
14807 }
14808 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
14809 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
14810 {
14811 if (source_c)
14812 {
14813 error_at (OMP_CLAUSE_LOCATION (c),
14814 "more than one %<depend%> clause with %<source%> "
14815 "modifier on an %<ordered%> construct");
14816 failures++;
14817 }
14818 else
14819 source_c = c;
14820 }
14821 }
14822 if (source_c && sink_c)
14823 {
14824 error_at (OMP_CLAUSE_LOCATION (source_c),
14825 "%<depend%> clause with %<source%> modifier specified "
14826 "together with %<depend%> clauses with %<sink%> modifier "
14827 "on the same construct");
14828 failures++;
14829 }
14830
14831 if (failures)
14832 return gimple_build_nop ();
14833 return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
14834 }
14835
14836 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
14837 expression produces a value to be used as an operand inside a GIMPLE
14838 statement, the value will be stored back in *EXPR_P. This value will
14839 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
14840 an SSA_NAME. The corresponding sequence of GIMPLE statements is
14841 emitted in PRE_P and POST_P.
14842
14843 Additionally, this process may overwrite parts of the input
14844 expression during gimplification. Ideally, it should be
14845 possible to do non-destructive gimplification.
14846
14847 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
14848 the expression needs to evaluate to a value to be used as
14849 an operand in a GIMPLE statement, this value will be stored in
14850 *EXPR_P on exit. This happens when the caller specifies one
14851 of fb_lvalue or fb_rvalue fallback flags.
14852
14853 PRE_P will contain the sequence of GIMPLE statements corresponding
14854 to the evaluation of EXPR and all the side-effects that must
14855 be executed before the main expression. On exit, the last
14856 statement of PRE_P is the core statement being gimplified. For
14857 instance, when gimplifying 'if (++a)' the last statement in
14858 PRE_P will be 'if (t.1)' where t.1 is the result of
14859 pre-incrementing 'a'.
14860
14861 POST_P will contain the sequence of GIMPLE statements corresponding
14862 to the evaluation of all the side-effects that must be executed
14863 after the main expression. If this is NULL, the post
14864 side-effects are stored at the end of PRE_P.
14865
14866 The reason why the output is split in two is to handle post
14867 side-effects explicitly. In some cases, an expression may have
14868 inner and outer post side-effects which need to be emitted in
14869 an order different from the one given by the recursive
14870 traversal. For instance, for the expression (*p--)++ the post
14871 side-effects of '--' must actually occur *after* the post
14872 side-effects of '++'. However, gimplification will first visit
14873 the inner expression, so if a separate POST sequence was not
14874 used, the resulting sequence would be:
14875
14876 1 t.1 = *p
14877 2 p = p - 1
14878 3 t.2 = t.1 + 1
14879 4 *p = t.2
14880
14881 However, the post-decrement operation in line #2 must not be
14882 evaluated until after the store to *p at line #4, so the
14883 correct sequence should be:
14884
14885 1 t.1 = *p
14886 2 t.2 = t.1 + 1
14887 3 *p = t.2
14888 4 p = p - 1
14889
14890 So, by specifying a separate post queue, it is possible
14891 to emit the post side-effects in the correct order.
14892 If POST_P is NULL, an internal queue will be used. Before
14893 returning to the caller, the sequence POST_P is appended to
14894 the main output sequence PRE_P.
14895
14896 GIMPLE_TEST_F points to a function that takes a tree T and
14897 returns nonzero if T is in the GIMPLE form requested by the
14898 caller. The GIMPLE predicates are in gimple.cc.
14899
14900 FALLBACK tells the function what sort of a temporary we want if
14901 gimplification cannot produce an expression that complies with
14902 GIMPLE_TEST_F.
14903
14904 fb_none means that no temporary should be generated
14905 fb_rvalue means that an rvalue is OK to generate
14906 fb_lvalue means that an lvalue is OK to generate
14907 fb_either means that either is OK, but an lvalue is preferable.
14908 fb_mayfail means that gimplification may fail (in which case
14909 GS_ERROR will be returned)
14910
14911 The return value is either GS_ERROR or GS_ALL_DONE, since this
14912 function iterates until EXPR is completely gimplified or an error
14913 occurs. */
14914
14915 enum gimplify_status
14916 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
14917 bool (*gimple_test_f) (tree), fallback_t fallback)
14918 {
14919 tree tmp;
14920 gimple_seq internal_pre = NULL;
14921 gimple_seq internal_post = NULL;
14922 tree save_expr;
14923 bool is_statement;
14924 location_t saved_location;
14925 enum gimplify_status ret;
14926 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
14927 tree label;
14928
14929 save_expr = *expr_p;
14930 if (save_expr == NULL_TREE)
14931 return GS_ALL_DONE;
14932
14933 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
14934 is_statement = gimple_test_f == is_gimple_stmt;
14935 if (is_statement)
14936 gcc_assert (pre_p);
14937
14938 /* Consistency checks. */
14939 if (gimple_test_f == is_gimple_reg)
14940 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
14941 else if (gimple_test_f == is_gimple_val
14942 || gimple_test_f == is_gimple_call_addr
14943 || gimple_test_f == is_gimple_condexpr
14944 || gimple_test_f == is_gimple_condexpr_for_cond
14945 || gimple_test_f == is_gimple_mem_rhs
14946 || gimple_test_f == is_gimple_mem_rhs_or_call
14947 || gimple_test_f == is_gimple_reg_rhs
14948 || gimple_test_f == is_gimple_reg_rhs_or_call
14949 || gimple_test_f == is_gimple_asm_val
14950 || gimple_test_f == is_gimple_mem_ref_addr)
14951 gcc_assert (fallback & fb_rvalue);
14952 else if (gimple_test_f == is_gimple_min_lval
14953 || gimple_test_f == is_gimple_lvalue)
14954 gcc_assert (fallback & fb_lvalue);
14955 else if (gimple_test_f == is_gimple_addressable)
14956 gcc_assert (fallback & fb_either);
14957 else if (gimple_test_f == is_gimple_stmt)
14958 gcc_assert (fallback == fb_none);
14959 else
14960 {
14961 /* We should have recognized the GIMPLE_TEST_F predicate to
14962 know what kind of fallback to use in case a temporary is
14963 needed to hold the value or address of *EXPR_P. */
14964 gcc_unreachable ();
14965 }
14966
14967 /* We used to check the predicate here and return immediately if it
14968 succeeds. This is wrong; the design is for gimplification to be
14969 idempotent, and for the predicates to only test for valid forms, not
14970 whether they are fully simplified. */
14971 if (pre_p == NULL)
14972 pre_p = &internal_pre;
14973
14974 if (post_p == NULL)
14975 post_p = &internal_post;
14976
14977 /* Remember the last statements added to PRE_P and POST_P. Every
14978 new statement added by the gimplification helpers needs to be
14979 annotated with location information. To centralize the
14980 responsibility, we remember the last statement that had been
14981 added to both queues before gimplifying *EXPR_P. If
14982 gimplification produces new statements in PRE_P and POST_P, those
14983 statements will be annotated with the same location information
14984 as *EXPR_P. */
14985 pre_last_gsi = gsi_last (*pre_p);
14986 post_last_gsi = gsi_last (*post_p);
14987
14988 saved_location = input_location;
14989 if (save_expr != error_mark_node
14990 && EXPR_HAS_LOCATION (*expr_p))
14991 input_location = EXPR_LOCATION (*expr_p);
14992
14993 /* Loop over the specific gimplifiers until the toplevel node
14994 remains the same. */
14995 do
14996 {
14997 /* Strip away as many useless type conversions as possible
14998 at the toplevel. */
14999 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
15000
15001 /* Remember the expr. */
15002 save_expr = *expr_p;
15003
15004 /* Die, die, die, my darling. */
15005 if (error_operand_p (save_expr))
15006 {
15007 ret = GS_ERROR;
15008 break;
15009 }
15010
15011 /* Do any language-specific gimplification. */
15012 ret = ((enum gimplify_status)
15013 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
15014 if (ret == GS_OK)
15015 {
15016 if (*expr_p == NULL_TREE)
15017 break;
15018 if (*expr_p != save_expr)
15019 continue;
15020 }
15021 else if (ret != GS_UNHANDLED)
15022 break;
15023
15024 /* Make sure that all the cases set 'ret' appropriately. */
15025 ret = GS_UNHANDLED;
15026 switch (TREE_CODE (*expr_p))
15027 {
15028 /* First deal with the special cases. */
15029
15030 case POSTINCREMENT_EXPR:
15031 case POSTDECREMENT_EXPR:
15032 case PREINCREMENT_EXPR:
15033 case PREDECREMENT_EXPR:
15034 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
15035 fallback != fb_none,
15036 TREE_TYPE (*expr_p));
15037 break;
15038
15039 case VIEW_CONVERT_EXPR:
15040 if ((fallback & fb_rvalue)
15041 && is_gimple_reg_type (TREE_TYPE (*expr_p))
15042 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
15043 {
15044 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
15045 post_p, is_gimple_val, fb_rvalue);
15046 recalculate_side_effects (*expr_p);
15047 break;
15048 }
15049 /* Fallthru. */
15050
15051 case ARRAY_REF:
15052 case ARRAY_RANGE_REF:
15053 case REALPART_EXPR:
15054 case IMAGPART_EXPR:
15055 case COMPONENT_REF:
15056 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
15057 fallback ? fallback : fb_rvalue);
15058 break;
15059
15060 case COND_EXPR:
15061 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
15062
15063 /* C99 code may assign to an array in a structure value of a
15064 conditional expression, and this has undefined behavior
15065 only on execution, so create a temporary if an lvalue is
15066 required. */
15067 if (fallback == fb_lvalue)
15068 {
15069 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
15070 mark_addressable (*expr_p);
15071 ret = GS_OK;
15072 }
15073 break;
15074
15075 case CALL_EXPR:
15076 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
15077
15078 /* C99 code may assign to an array in a structure returned
15079 from a function, and this has undefined behavior only on
15080 execution, so create a temporary if an lvalue is
15081 required. */
15082 if (fallback == fb_lvalue)
15083 {
15084 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
15085 mark_addressable (*expr_p);
15086 ret = GS_OK;
15087 }
15088 break;
15089
15090 case TREE_LIST:
15091 gcc_unreachable ();
15092
15093 case COMPOUND_EXPR:
15094 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
15095 break;
15096
15097 case COMPOUND_LITERAL_EXPR:
15098 ret = gimplify_compound_literal_expr (expr_p, pre_p,
15099 gimple_test_f, fallback);
15100 break;
15101
15102 case MODIFY_EXPR:
15103 case INIT_EXPR:
15104 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
15105 fallback != fb_none);
15106 break;
15107
15108 case TRUTH_ANDIF_EXPR:
15109 case TRUTH_ORIF_EXPR:
15110 {
15111 /* Preserve the original type of the expression and the
15112 source location of the outer expression. */
15113 tree org_type = TREE_TYPE (*expr_p);
15114 *expr_p = gimple_boolify (*expr_p);
15115 *expr_p = build3_loc (input_location, COND_EXPR,
15116 org_type, *expr_p,
15117 fold_convert_loc
15118 (input_location,
15119 org_type, boolean_true_node),
15120 fold_convert_loc
15121 (input_location,
15122 org_type, boolean_false_node));
15123 ret = GS_OK;
15124 break;
15125 }
15126
15127 case TRUTH_NOT_EXPR:
15128 {
15129 tree type = TREE_TYPE (*expr_p);
15130 /* The parsers are careful to generate TRUTH_NOT_EXPR
15131 only with operands that are always zero or one.
15132 We do not fold here but handle the only interesting case
15133 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
15134 *expr_p = gimple_boolify (*expr_p);
15135 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
15136 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
15137 TREE_TYPE (*expr_p),
15138 TREE_OPERAND (*expr_p, 0));
15139 else
15140 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
15141 TREE_TYPE (*expr_p),
15142 TREE_OPERAND (*expr_p, 0),
15143 build_int_cst (TREE_TYPE (*expr_p), 1));
15144 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
15145 *expr_p = fold_convert_loc (input_location, type, *expr_p);
15146 ret = GS_OK;
15147 break;
15148 }
15149
15150 case ADDR_EXPR:
15151 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
15152 break;
15153
15154 case ANNOTATE_EXPR:
15155 {
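/* Loop annotations survive gimplification as IFN_ANNOTATE internal
   calls wrapping the condition; later passes consume and strip them.  */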
15156 tree cond = TREE_OPERAND (*expr_p, 0);
15157 tree kind = TREE_OPERAND (*expr_p, 1);
15158 tree data = TREE_OPERAND (*expr_p, 2);
15159 tree type = TREE_TYPE (cond);
15160 if (!INTEGRAL_TYPE_P (type))
15161 {
15162 *expr_p = cond;
15163 ret = GS_OK;
15164 break;
15165 }
15166 tree tmp = create_tmp_var (type);
15167 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
15168 gcall *call
15169 = gimple_build_call_internal (IFN_ANNOTATE, 3, cond, kind, data);
15170 gimple_call_set_lhs (call, tmp);
15171 gimplify_seq_add_stmt (pre_p, call);
15172 *expr_p = tmp;
15173 ret = GS_ALL_DONE;
15174 break;
15175 }
15176
15177 case VA_ARG_EXPR:
15178 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
15179 break;
15180
15181 CASE_CONVERT:
15182 if (IS_EMPTY_STMT (*expr_p))
15183 {
15184 ret = GS_ALL_DONE;
15185 break;
15186 }
15187
15188 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
15189 || fallback == fb_none)
15190 {
15191 /* Just strip a conversion to void (or in void context) and
15192 try again. */
15193 *expr_p = TREE_OPERAND (*expr_p, 0);
15194 ret = GS_OK;
15195 break;
15196 }
15197
15198 ret = gimplify_conversion (expr_p);
15199 if (ret == GS_ERROR)
15200 break;
15201 if (*expr_p != save_expr)
15202 break;
15203 /* FALLTHRU */
15204
15205 case FIX_TRUNC_EXPR:
15206 /* unary_expr: ... | '(' cast ')' val | ... */
15207 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
15208 is_gimple_val, fb_rvalue);
15209 recalculate_side_effects (*expr_p);
15210 break;
15211
15212 case INDIRECT_REF:
15213 {
15214 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
15215 bool notrap = TREE_THIS_NOTRAP (*expr_p);
15216 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
15217
15218 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
15219 if (*expr_p != save_expr)
15220 {
15221 ret = GS_OK;
15222 break;
15223 }
15224
15225 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
15226 is_gimple_reg, fb_rvalue);
15227 if (ret == GS_ERROR)
15228 break;
15229
15230 recalculate_side_effects (*expr_p);
15231 *expr_p = fold_build2_loc (input_location, MEM_REF,
15232 TREE_TYPE (*expr_p),
15233 TREE_OPERAND (*expr_p, 0),
15234 build_int_cst (saved_ptr_type, 0));
15235 TREE_THIS_VOLATILE (*expr_p) = volatilep;
15236 TREE_THIS_NOTRAP (*expr_p) = notrap;
15237 ret = GS_OK;
15238 break;
15239 }
15240
15241 /* We arrive here through the various re-gimplification paths. */
15242 case MEM_REF:
15243 /* First try re-folding the whole thing. */
15244 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
15245 TREE_OPERAND (*expr_p, 0),
15246 TREE_OPERAND (*expr_p, 1));
15247 if (tmp)
15248 {
15249 REF_REVERSE_STORAGE_ORDER (tmp)
15250 = REF_REVERSE_STORAGE_ORDER (*expr_p);
15251 *expr_p = tmp;
15252 recalculate_side_effects (*expr_p);
15253 ret = GS_OK;
15254 break;
15255 }
15256 /* Avoid re-gimplifying the address operand if it is already
15257 in suitable form. Re-gimplifying would mark the address
15258 operand addressable. Always gimplify when not in SSA form
15259 as we still may have to gimplify decls with value-exprs. */
15260 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
15261 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
15262 {
15263 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
15264 is_gimple_mem_ref_addr, fb_rvalue);
15265 if (ret == GS_ERROR)
15266 break;
15267 }
15268 recalculate_side_effects (*expr_p);
15269 ret = GS_ALL_DONE;
15270 break;
15271
15272 /* Constants need not be gimplified. */
15273 case INTEGER_CST:
15274 case REAL_CST:
15275 case FIXED_CST:
15276 case STRING_CST:
15277 case COMPLEX_CST:
15278 case VECTOR_CST:
15279 /* Drop the overflow flag on constants, we do not want
15280 that in the GIMPLE IL. */
15281 if (TREE_OVERFLOW_P (*expr_p))
15282 *expr_p = drop_tree_overflow (*expr_p);
15283 ret = GS_ALL_DONE;
15284 break;
15285
15286 case CONST_DECL:
15287 /* If we require an lvalue, such as for ADDR_EXPR, retain the
15288 CONST_DECL node. Otherwise the decl is replaceable by its
15289 value. */
15290 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
      if (fallback & fb_lvalue)
        ret = GS_ALL_DONE;
      else
        {
          *expr_p = DECL_INITIAL (*expr_p);
          ret = GS_OK;
        }
      break;

    case DECL_EXPR:
      ret = gimplify_decl_expr (expr_p, pre_p);
      break;

    case BIND_EXPR:
      ret = gimplify_bind_expr (expr_p, pre_p);
      break;

    case LOOP_EXPR:
      ret = gimplify_loop_expr (expr_p, pre_p);
      break;

    case SWITCH_EXPR:
      ret = gimplify_switch_expr (expr_p, pre_p);
      break;

    case EXIT_EXPR:
      ret = gimplify_exit_expr (expr_p);
      break;

    case GOTO_EXPR:
      /* If the target is not a LABEL_DECL, then it is a computed jump
         and the target needs to be gimplified.  */
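      /* E.g. for the GNU C computed goto

             goto *dispatch[i];

         GOTO_DESTINATION is the expression "dispatch[i]", which the
         call below reduces to a single value, roughly

             _1 = dispatch[i_2];
             goto _1;

         (temporary names invented for illustration).  */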
      if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
        {
          ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
                               NULL, is_gimple_val, fb_rvalue);
          if (ret == GS_ERROR)
            break;
        }
      gimplify_seq_add_stmt (pre_p,
                             gimple_build_goto (GOTO_DESTINATION (*expr_p)));
      ret = GS_ALL_DONE;
      break;

    case PREDICT_EXPR:
      gimplify_seq_add_stmt (pre_p,
                             gimple_build_predict
                               (PREDICT_EXPR_PREDICTOR (*expr_p),
                                PREDICT_EXPR_OUTCOME (*expr_p)));
      ret = GS_ALL_DONE;
      break;

    case LABEL_EXPR:
      ret = gimplify_label_expr (expr_p, pre_p);
      label = LABEL_EXPR_LABEL (*expr_p);
      gcc_assert (decl_function_context (label) == current_function_decl);
      /* If the label is used in a goto statement, or the address of the
         label is taken, we need to unpoison all variables that were seen
         so far.  Doing so prevents false positives from being reported.  */
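      /* Hypothetical example of why this matters: with
         -fsanitize-address-use-after-scope, in

             { int x; l: use (&x); }
             if (cond) goto l;

         control may re-enter the scope through "l" after "x" was
         poisoned at scope exit, so every poisoned variable seen so far
         is unpoisoned at the label to avoid bogus reports.  */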
      if (asan_poisoned_variables
          && asan_used_labels != NULL
          && asan_used_labels->contains (label)
          && !gimplify_omp_ctxp)
        asan_poison_variables (asan_poisoned_variables, false, pre_p);
      break;

    case CASE_LABEL_EXPR:
      ret = gimplify_case_label_expr (expr_p, pre_p);

      if (gimplify_ctxp->live_switch_vars)
        asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
                               pre_p);
      break;

    case RETURN_EXPR:
      ret = gimplify_return_expr (*expr_p, pre_p);
      break;

    case CONSTRUCTOR:
      /* Don't reduce this in place; let gimplify_init_constructor work its
         magic.  But if we're just elaborating this for side effects, just
         gimplify any element that has side-effects.  */
      if (fallback == fb_none)
        {
          unsigned HOST_WIDE_INT ix;
          tree val;
          tree temp = NULL_TREE;
          FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
            if (TREE_SIDE_EFFECTS (val))
              append_to_statement_list (val, &temp);

          *expr_p = temp;
          ret = temp ? GS_OK : GS_ALL_DONE;
        }
      /* C99 code may assign to an array in a constructed
         structure or union, and this has undefined behavior only
         on execution, so create a temporary if an lvalue is
         required.  */
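      /* Illustrative C99 fragment (invented for this comment):

             (struct S){ .a = { 1, 2 } }.a[i] = 0;

         needs the constructor materialized into an addressable
         temporary so that the array element can be assigned; the
         get_initialized_tmp_var call below creates it.  */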
      else if (fallback == fb_lvalue)
        {
          *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
          mark_addressable (*expr_p);
          ret = GS_OK;
        }
      else
        ret = GS_ALL_DONE;
      break;

    /* The following are special cases that are not handled by the
       original GIMPLE grammar.  */

    /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
       eliminated.  */
    case SAVE_EXPR:
      ret = gimplify_save_expr (expr_p, pre_p, post_p);
      break;

    case BIT_FIELD_REF:
      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
                           post_p, is_gimple_lvalue, fb_either);
      recalculate_side_effects (*expr_p);
      break;

    case TARGET_MEM_REF:
      {
        enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;

        if (TMR_BASE (*expr_p))
          r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
                              post_p, is_gimple_mem_ref_addr, fb_either);
        if (TMR_INDEX (*expr_p))
          r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
                              post_p, is_gimple_val, fb_rvalue);
        if (TMR_INDEX2 (*expr_p))
          r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
                              post_p, is_gimple_val, fb_rvalue);
        /* TMR_STEP and TMR_OFFSET are always integer constants.  */
        ret = MIN (r0, r1);
      }
      break;

    case NON_LVALUE_EXPR:
      /* This should have been stripped above.  */
      gcc_unreachable ();

    case ASM_EXPR:
      ret = gimplify_asm_expr (expr_p, pre_p, post_p);
      break;

    case TRY_FINALLY_EXPR:
    case TRY_CATCH_EXPR:
      {
        gimple_seq eval, cleanup;
        gtry *try_;

        /* Calls to destructors are generated automatically in FINALLY/CATCH
           block.  They should have location as UNKNOWN_LOCATION.  However,
           gimplify_call_expr will reset these call stmts to input_location
           if it finds stmt's location is unknown.  To prevent resetting for
           destructors, we set the input_location to unknown.
           Note that this only affects the destructor calls in FINALLY/CATCH
           block, and will automatically reset to its original value by the
           end of gimplify_expr.  */
        input_location = UNKNOWN_LOCATION;
        eval = cleanup = NULL;
        gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
        if (TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
            && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == EH_ELSE_EXPR)
          {
            gimple_seq n = NULL, e = NULL;
            gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
                                            0), &n);
            gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
                                            1), &e);
            if (!gimple_seq_empty_p (n) && !gimple_seq_empty_p (e))
              {
                geh_else *stmt = gimple_build_eh_else (n, e);
                gimple_seq_add_stmt (&cleanup, stmt);
              }
          }
        else
          gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
        /* Don't create bogus GIMPLE_TRY with empty cleanup.  */
        if (gimple_seq_empty_p (cleanup))
          {
            gimple_seq_add_seq (pre_p, eval);
            ret = GS_ALL_DONE;
            break;
          }
        try_ = gimple_build_try (eval, cleanup,
                                 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
                                 ? GIMPLE_TRY_FINALLY
                                 : GIMPLE_TRY_CATCH);
        if (EXPR_HAS_LOCATION (save_expr))
          gimple_set_location (try_, EXPR_LOCATION (save_expr));
        else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
          gimple_set_location (try_, saved_location);
        if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
          gimple_try_set_catch_is_cleanup (try_,
                                           TRY_CATCH_IS_CLEANUP (*expr_p));
        gimplify_seq_add_stmt (pre_p, try_);
        ret = GS_ALL_DONE;
        break;
      }

    case CLEANUP_POINT_EXPR:
      ret = gimplify_cleanup_point_expr (expr_p, pre_p);
      break;

    case TARGET_EXPR:
      ret = gimplify_target_expr (expr_p, pre_p, post_p);
      break;

    case CATCH_EXPR:
      {
        gimple *c;
        gimple_seq handler = NULL;
        gimplify_and_add (CATCH_BODY (*expr_p), &handler);
        c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
        gimplify_seq_add_stmt (pre_p, c);
        ret = GS_ALL_DONE;
        break;
      }

    case EH_FILTER_EXPR:
      {
        gimple *ehf;
        gimple_seq failure = NULL;

        gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
        ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
        copy_warning (ehf, *expr_p);
        gimplify_seq_add_stmt (pre_p, ehf);
        ret = GS_ALL_DONE;
        break;
      }

    case OBJ_TYPE_REF:
      {
        enum gimplify_status r0, r1;
        r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
                            post_p, is_gimple_val, fb_rvalue);
        r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
                            post_p, is_gimple_val, fb_rvalue);
        TREE_SIDE_EFFECTS (*expr_p) = 0;
        ret = MIN (r0, r1);
      }
      break;

    case LABEL_DECL:
      /* We get here when taking the address of a label.  We mark
         the label as "forced", meaning it can never be removed and
         it is a potential target for any computed goto.  */
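      /* E.g. for the GNU extension

             void *p = &&resume;

         "resume" must survive all optimizations and be assumed
         reachable from any "goto *..." in the function, which is
         exactly what FORCED_LABEL encodes.  */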
      FORCED_LABEL (*expr_p) = 1;
      ret = GS_ALL_DONE;
      break;

    case STATEMENT_LIST:
      ret = gimplify_statement_list (expr_p, pre_p);
      break;

    case WITH_SIZE_EXPR:
      {
        gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
                       post_p == &internal_post ? NULL : post_p,
                       gimple_test_f, fallback);
        gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
                       is_gimple_val, fb_rvalue);
        ret = GS_ALL_DONE;
      }
      break;

    case VAR_DECL:
    case PARM_DECL:
      ret = gimplify_var_or_parm_decl (expr_p);
      break;

    case RESULT_DECL:
      /* When within an OMP context, notice uses of variables.  */
      if (gimplify_omp_ctxp)
        omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
      ret = GS_ALL_DONE;
      break;

    case DEBUG_EXPR_DECL:
      gcc_unreachable ();

    case DEBUG_BEGIN_STMT:
      gimplify_seq_add_stmt (pre_p,
                             gimple_build_debug_begin_stmt
                               (TREE_BLOCK (*expr_p),
                                EXPR_LOCATION (*expr_p)));
      ret = GS_ALL_DONE;
      *expr_p = NULL;
      break;

    case SSA_NAME:
      /* Allow callbacks into the gimplifier during optimization.  */
      ret = GS_ALL_DONE;
      break;

    case OMP_PARALLEL:
      gimplify_omp_parallel (expr_p, pre_p);
      ret = GS_ALL_DONE;
      break;

    case OMP_TASK:
      gimplify_omp_task (expr_p, pre_p);
      ret = GS_ALL_DONE;
      break;

    case OMP_SIMD:
      {
        /* Temporarily disable into_ssa, as scan_omp_simd
           which calls copy_gimple_seq_and_replace_locals can't deal
           with SSA_NAMEs defined outside of the body properly.  */
        bool saved_into_ssa = gimplify_ctxp->into_ssa;
        gimplify_ctxp->into_ssa = false;
        ret = gimplify_omp_for (expr_p, pre_p);
        gimplify_ctxp->into_ssa = saved_into_ssa;
        break;
      }

    case OMP_FOR:
    case OMP_DISTRIBUTE:
    case OMP_TASKLOOP:
    case OACC_LOOP:
      ret = gimplify_omp_for (expr_p, pre_p);
      break;

    case OMP_LOOP:
      ret = gimplify_omp_loop (expr_p, pre_p);
      break;

    case OACC_CACHE:
      gimplify_oacc_cache (expr_p, pre_p);
      ret = GS_ALL_DONE;
      break;

    case OACC_DECLARE:
      gimplify_oacc_declare (expr_p, pre_p);
      ret = GS_ALL_DONE;
      break;

    case OACC_HOST_DATA:
    case OACC_DATA:
    case OACC_KERNELS:
    case OACC_PARALLEL:
    case OACC_SERIAL:
    case OMP_SCOPE:
    case OMP_SECTIONS:
    case OMP_SINGLE:
    case OMP_TARGET:
    case OMP_TARGET_DATA:
    case OMP_TEAMS:
      gimplify_omp_workshare (expr_p, pre_p);
      ret = GS_ALL_DONE;
      break;

    case OACC_ENTER_DATA:
    case OACC_EXIT_DATA:
    case OACC_UPDATE:
    case OMP_TARGET_UPDATE:
    case OMP_TARGET_ENTER_DATA:
    case OMP_TARGET_EXIT_DATA:
      gimplify_omp_target_update (expr_p, pre_p);
      ret = GS_ALL_DONE;
      break;

    case OMP_SECTION:
    case OMP_MASTER:
    case OMP_MASKED:
    case OMP_ORDERED:
    case OMP_CRITICAL:
    case OMP_SCAN:
      {
        gimple_seq body = NULL;
        gimple *g;
        bool saved_in_omp_construct = in_omp_construct;

        in_omp_construct = true;
        gimplify_and_add (OMP_BODY (*expr_p), &body);
        in_omp_construct = saved_in_omp_construct;
        switch (TREE_CODE (*expr_p))
          {
          case OMP_SECTION:
            g = gimple_build_omp_section (body);
            break;
          case OMP_MASTER:
            g = gimple_build_omp_master (body);
            break;
          case OMP_ORDERED:
            g = gimplify_omp_ordered (*expr_p, body);
            break;
          case OMP_MASKED:
            gimplify_scan_omp_clauses (&OMP_MASKED_CLAUSES (*expr_p),
                                       pre_p, ORT_WORKSHARE, OMP_MASKED);
            gimplify_adjust_omp_clauses (pre_p, body,
                                         &OMP_MASKED_CLAUSES (*expr_p),
                                         OMP_MASKED);
            g = gimple_build_omp_masked (body,
                                         OMP_MASKED_CLAUSES (*expr_p));
            break;
          case OMP_CRITICAL:
            gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
                                       pre_p, ORT_WORKSHARE, OMP_CRITICAL);
            gimplify_adjust_omp_clauses (pre_p, body,
                                         &OMP_CRITICAL_CLAUSES (*expr_p),
                                         OMP_CRITICAL);
            g = gimple_build_omp_critical (body,
                                           OMP_CRITICAL_NAME (*expr_p),
                                           OMP_CRITICAL_CLAUSES (*expr_p));
            break;
          case OMP_SCAN:
            gimplify_scan_omp_clauses (&OMP_SCAN_CLAUSES (*expr_p),
                                       pre_p, ORT_WORKSHARE, OMP_SCAN);
            gimplify_adjust_omp_clauses (pre_p, body,
                                         &OMP_SCAN_CLAUSES (*expr_p),
                                         OMP_SCAN);
            g = gimple_build_omp_scan (body, OMP_SCAN_CLAUSES (*expr_p));
            break;
          default:
            gcc_unreachable ();
          }
        gimplify_seq_add_stmt (pre_p, g);
        ret = GS_ALL_DONE;
        break;
      }

    case OMP_TASKGROUP:
      {
        gimple_seq body = NULL;

        tree *pclauses = &OMP_TASKGROUP_CLAUSES (*expr_p);
        bool saved_in_omp_construct = in_omp_construct;
        gimplify_scan_omp_clauses (pclauses, pre_p, ORT_TASKGROUP,
                                   OMP_TASKGROUP);
        gimplify_adjust_omp_clauses (pre_p, NULL, pclauses, OMP_TASKGROUP);

        in_omp_construct = true;
        gimplify_and_add (OMP_BODY (*expr_p), &body);
        in_omp_construct = saved_in_omp_construct;
        gimple_seq cleanup = NULL;
        tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
        gimple *g = gimple_build_call (fn, 0);
        gimple_seq_add_stmt (&cleanup, g);
        g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
        body = NULL;
        gimple_seq_add_stmt (&body, g);
        g = gimple_build_omp_taskgroup (body, *pclauses);
        gimplify_seq_add_stmt (pre_p, g);
        ret = GS_ALL_DONE;
        break;
      }

    case OMP_ATOMIC:
    case OMP_ATOMIC_READ:
    case OMP_ATOMIC_CAPTURE_OLD:
    case OMP_ATOMIC_CAPTURE_NEW:
      ret = gimplify_omp_atomic (expr_p, pre_p);
      break;

    case TRANSACTION_EXPR:
      ret = gimplify_transaction (expr_p, pre_p);
      break;

    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
      {
        tree orig_type = TREE_TYPE (*expr_p);
        tree new_type, xop0, xop1;
        *expr_p = gimple_boolify (*expr_p);
        new_type = TREE_TYPE (*expr_p);
        if (!useless_type_conversion_p (orig_type, new_type))
          {
            *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
            ret = GS_OK;
            break;
          }

        /* Boolified binary truth expressions are semantically equivalent
           to bitwise binary expressions.  Canonicalize them to the
           bitwise variant.  */
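        /* Roughly, once both operands are boolean,

               a && b   (TRUTH_AND_EXPR; no short-circuiting by now)

           becomes

               a & b    (BIT_AND_EXPR)

           The short-circuiting TRUTH_ANDIF_EXPR/TRUTH_ORIF_EXPR forms
           are lowered to conditionals elsewhere and do not reach this
           canonicalization.  (Sketch added for exposition.)  */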
        switch (TREE_CODE (*expr_p))
          {
          case TRUTH_AND_EXPR:
            TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
            break;
          case TRUTH_OR_EXPR:
            TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
            break;
          case TRUTH_XOR_EXPR:
            TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
            break;
          default:
            break;
          }
        /* Now make sure that operands have compatible type to
           expression's new_type.  */
        xop0 = TREE_OPERAND (*expr_p, 0);
        xop1 = TREE_OPERAND (*expr_p, 1);
        if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
          TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
                                                        new_type,
                                                        xop0);
        if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
          TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
                                                        new_type,
                                                        xop1);
        /* Continue classified as tcc_binary.  */
        goto expr_2;
      }

    case VEC_COND_EXPR:
      goto expr_3;

    case VEC_PERM_EXPR:
      /* Classified as tcc_expression.  */
      goto expr_3;

    case BIT_INSERT_EXPR:
      /* Argument 3 is a constant.  */
      goto expr_2;

    case POINTER_PLUS_EXPR:
      {
        enum gimplify_status r0, r1;
        r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
                            post_p, is_gimple_val, fb_rvalue);
        r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
                            post_p, is_gimple_val, fb_rvalue);
        recalculate_side_effects (*expr_p);
        ret = MIN (r0, r1);
        break;
      }

    default:
      switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
        {
        case tcc_comparison:
          /* Handle comparison of objects of non-scalar-mode aggregates
             with a call to memcmp.  It would be nice to only have to do
             this for variable-sized objects, but then we'd have to allow
             the same nest of reference nodes we allow for MODIFY_EXPR and
             that's too complex.

             Compare scalar-mode aggregates as scalar-mode values.  Using
             memcmp for them would be very inefficient at best, and is
             plain wrong if bitfields are involved.  */
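          /* Sketch of the outcomes below, for a language that permits
             aggregate equality (example invented):

                 struct S { short a, b; } x, y;   ... x == y ...

             - if S fits in a scalar mode (say SImode), the operands are
               compared as that scalar value;
             - if S is BLKmode, the comparison becomes
               memcmp (&x, &y, sizeof (struct S)) == 0;
             - scalars are boolified and vectors fall through to expr_2.  */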
          if (error_operand_p (TREE_OPERAND (*expr_p, 1)))
            ret = GS_ERROR;
          else
            {
              tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));

              /* Vector comparisons need no boolification.  */
              if (TREE_CODE (type) == VECTOR_TYPE)
                goto expr_2;
              else if (!AGGREGATE_TYPE_P (type))
                {
                  tree org_type = TREE_TYPE (*expr_p);
                  *expr_p = gimple_boolify (*expr_p);
                  if (!useless_type_conversion_p (org_type,
                                                  TREE_TYPE (*expr_p)))
                    {
                      *expr_p = fold_convert_loc (input_location,
                                                  org_type, *expr_p);
                      ret = GS_OK;
                    }
                  else
                    goto expr_2;
                }
              else if (TYPE_MODE (type) != BLKmode)
                ret = gimplify_scalar_mode_aggregate_compare (expr_p);
              else
                ret = gimplify_variable_sized_compare (expr_p);
            }
          break;

        /* If *EXPR_P does not need to be special-cased, handle it
           according to its class.  */
        case tcc_unary:
          ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
                               post_p, is_gimple_val, fb_rvalue);
          break;

        case tcc_binary:
        expr_2:
          {
            enum gimplify_status r0, r1;

            r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
                                post_p, is_gimple_val, fb_rvalue);
            r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
                                post_p, is_gimple_val, fb_rvalue);

            ret = MIN (r0, r1);
            break;
          }

        expr_3:
          {
            enum gimplify_status r0, r1, r2;

            r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
                                post_p, is_gimple_val, fb_rvalue);
            r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
                                post_p, is_gimple_val, fb_rvalue);
            r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
                                post_p, is_gimple_val, fb_rvalue);

            ret = MIN (MIN (r0, r1), r2);
            break;
          }

        case tcc_declaration:
        case tcc_constant:
          ret = GS_ALL_DONE;
          goto dont_recalculate;

        default:
          gcc_unreachable ();
        }

      recalculate_side_effects (*expr_p);

    dont_recalculate:
      break;
    }

  gcc_assert (*expr_p || ret != GS_OK);
  }
  while (ret == GS_OK);

  /* If we encountered an error_mark somewhere nested inside, either
     stub out the statement or propagate the error back out.  */
  if (ret == GS_ERROR)
    {
      if (is_statement)
        *expr_p = NULL;
      goto out;
    }

  /* This was only valid as a return value from the langhook, which
     we handled.  Make sure it doesn't escape from any other context.  */
  gcc_assert (ret != GS_UNHANDLED);

  if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
    {
      /* We aren't looking for a value, and we don't have a valid
         statement.  If it doesn't have side-effects, throw it away.
         We can also get here with code such as "*&&L;", where L is
         a LABEL_DECL that is marked as FORCED_LABEL.  */
      if (TREE_CODE (*expr_p) == LABEL_DECL
          || !TREE_SIDE_EFFECTS (*expr_p))
        *expr_p = NULL;
      else if (!TREE_THIS_VOLATILE (*expr_p))
        {
          /* This is probably a _REF that contains something nested that
             has side effects.  Recurse through the operands to find it.  */
          enum tree_code code = TREE_CODE (*expr_p);

          switch (code)
            {
            case COMPONENT_REF:
            case REALPART_EXPR:
            case IMAGPART_EXPR:
            case VIEW_CONVERT_EXPR:
              gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
                             gimple_test_f, fallback);
              break;

            case ARRAY_REF:
            case ARRAY_RANGE_REF:
              gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
                             gimple_test_f, fallback);
              gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
                             gimple_test_f, fallback);
              break;

            default:
              /* Anything else with side-effects must be converted to
                 a valid statement before we get here.  */
              gcc_unreachable ();
            }

          *expr_p = NULL;
        }
      else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
               && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode
               && !is_empty_type (TREE_TYPE (*expr_p)))
        {
          /* Historically, the compiler has treated a bare reference
             to a non-BLKmode volatile lvalue as forcing a load.  */
          tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));

          /* Normally, we do not want to create a temporary for a
             TREE_ADDRESSABLE type because such a type should not be
             copied by bitwise-assignment.  However, we make an
             exception here, as all we are doing here is ensuring that
             we read the bytes that make up the type.  We use
             create_tmp_var_raw because create_tmp_var will abort when
             given a TREE_ADDRESSABLE type.  */
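          /* E.g. (illustrative) the expression statement

                 volatile struct S { int i; } s;
                 s;

             where S fits in a register mode, is preserved as a load
             into an artificial variable, roughly "vol.N = s;", built
             just below.  */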
          tree tmp = create_tmp_var_raw (type, "vol");
          gimple_add_tmp_var (tmp);
          gimplify_assign (tmp, *expr_p, pre_p);
          *expr_p = NULL;
        }
      else
        /* We can't do anything useful with a volatile reference to
           an incomplete type, so just throw it away.  Likewise for
           a BLKmode type, since any implicit inner load should
           already have been turned into an explicit one by the
           gimplification process.  */
        *expr_p = NULL;
    }

  /* If we are gimplifying at the statement level, we're done.  Tack
     everything together and return.  */
  if (fallback == fb_none || is_statement)
    {
      /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
         it out for GC to reclaim it.  */
      *expr_p = NULL_TREE;

      if (!gimple_seq_empty_p (internal_pre)
          || !gimple_seq_empty_p (internal_post))
        {
          gimplify_seq_add_seq (&internal_pre, internal_post);
          gimplify_seq_add_seq (pre_p, internal_pre);
        }

      /* The result of gimplifying *EXPR_P is going to be the last few
         statements in *PRE_P and *POST_P.  Add location information
         to all the statements that were added by the gimplification
         helpers.  */
      if (!gimple_seq_empty_p (*pre_p))
        annotate_all_with_location_after (*pre_p, pre_last_gsi,
                                          input_location);

      if (!gimple_seq_empty_p (*post_p))
        annotate_all_with_location_after (*post_p, post_last_gsi,
                                          input_location);

      goto out;
    }

#ifdef ENABLE_GIMPLE_CHECKING
  if (*expr_p)
    {
      enum tree_code code = TREE_CODE (*expr_p);
      /* These expressions should already be in gimple IR form.  */
      gcc_assert (code != MODIFY_EXPR
                  && code != ASM_EXPR
                  && code != BIND_EXPR
                  && code != CATCH_EXPR
                  && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
                  && code != EH_FILTER_EXPR
                  && code != GOTO_EXPR
                  && code != LABEL_EXPR
                  && code != LOOP_EXPR
                  && code != SWITCH_EXPR
                  && code != TRY_FINALLY_EXPR
                  && code != EH_ELSE_EXPR
                  && code != OACC_PARALLEL
                  && code != OACC_KERNELS
                  && code != OACC_SERIAL
                  && code != OACC_DATA
                  && code != OACC_HOST_DATA
                  && code != OACC_DECLARE
                  && code != OACC_UPDATE
                  && code != OACC_ENTER_DATA
                  && code != OACC_EXIT_DATA
                  && code != OACC_CACHE
                  && code != OMP_CRITICAL
                  && code != OMP_FOR
                  && code != OACC_LOOP
                  && code != OMP_MASTER
                  && code != OMP_MASKED
                  && code != OMP_TASKGROUP
                  && code != OMP_ORDERED
                  && code != OMP_PARALLEL
                  && code != OMP_SCAN
                  && code != OMP_SECTIONS
                  && code != OMP_SECTION
                  && code != OMP_SINGLE
                  && code != OMP_SCOPE);
    }
#endif

  /* Otherwise we're gimplifying a subexpression, so the resulting
     value is interesting.  If it's a valid operand that matches
     GIMPLE_TEST_F, we're done.  Unless we are handling some
     post-effects internally; if that's the case, we need to copy into
     a temporary before adding the post-effects to POST_P.  */
  if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
    goto out;

  /* Otherwise, we need to create a new temporary for the gimplified
     expression.  */

  /* We can't return an lvalue if we have an internal postqueue.  The
     object the lvalue refers to would (probably) be modified by the
     postqueue; we need to copy the value out first, which means an
     rvalue.  */
  if ((fallback & fb_lvalue)
      && gimple_seq_empty_p (internal_post)
      && is_gimple_addressable (*expr_p))
    {
      /* An lvalue will do.  Take the address of the expression, store it
         in a temporary, and replace the expression with an INDIRECT_REF of
         that temporary.  */
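      /* Schematically (names invented): for an addressable *EXPR_P
         such as "a.b[i]" we emit

             t = &a.b[i];

         and rewrite *EXPR_P to MEM[(T *)t], where T is the reference
         type, possibly alignment-adjusted below if the reference was
         known to be more or less aligned than its type.  */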
      tree ref_alias_type = reference_alias_ptr_type (*expr_p);
      unsigned int ref_align = get_object_alignment (*expr_p);
      tree ref_type = TREE_TYPE (*expr_p);
      tmp = build_fold_addr_expr_loc (input_location, *expr_p);
      gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
      if (TYPE_ALIGN (ref_type) != ref_align)
        ref_type = build_aligned_type (ref_type, ref_align);
      *expr_p = build2 (MEM_REF, ref_type,
                        tmp, build_zero_cst (ref_alias_type));
    }
  else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
    {
      /* An rvalue will do.  Assign the gimplified expression into a
         new temporary TMP and replace the original expression with
         TMP.  First, make sure that the expression has a type so that
         it can be assigned into a temporary.  */
      gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
      *expr_p = get_formal_tmp_var (*expr_p, pre_p);
    }
  else
    {
#ifdef ENABLE_GIMPLE_CHECKING
      if (!(fallback & fb_mayfail))
        {
          fprintf (stderr, "gimplification failed:\n");
          print_generic_expr (stderr, *expr_p);
          debug_tree (*expr_p);
          internal_error ("gimplification failed");
        }
#endif
      gcc_assert (fallback & fb_mayfail);

      /* If this is an asm statement, and the user asked for the
         impossible, don't die.  Fail and let gimplify_asm_expr
         issue an error.  */
      ret = GS_ERROR;
      goto out;
    }

  /* Make sure the temporary matches our predicate.  */
  gcc_assert ((*gimple_test_f) (*expr_p));

  if (!gimple_seq_empty_p (internal_post))
    {
      annotate_all_with_location (internal_post, input_location);
      gimplify_seq_add_seq (pre_p, internal_post);
    }

 out:
  input_location = saved_location;
  return ret;
}

/* Like gimplify_expr but make sure the gimplified result is not itself
   an SSA name; if it would be, replace it with a decl-backed temporary.
   Temporaries required by evaluating *EXPR_P may still be SSA names.  */

static enum gimplify_status
gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
               bool (*gimple_test_f) (tree), fallback_t fallback,
               bool allow_ssa)
{
  enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
                                            gimple_test_f, fallback);
  if (! allow_ssa
      && TREE_CODE (*expr_p) == SSA_NAME)
    *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
  return ret;
}

/* Look through TYPE for variable-sized objects and gimplify each such
   size that we find.  Add to LIST_P any statements generated.  */
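/* For instance, given the C VLA

       void f (int n) { char a[n]; }

   TYPE_SIZE and TYPE_SIZE_UNIT of a's type contain expressions in "n";
   they are gimplified here into statements computing the size into
   artificial temporaries, so every later use of the size is a simple
   variable read.  (Illustrative; the exact temporaries vary.)  */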

void
gimplify_type_sizes (tree type, gimple_seq *list_p)
{
  if (type == NULL || type == error_mark_node)
    return;

  const bool ignored_p
    = TYPE_NAME (type)
      && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
      && DECL_IGNORED_P (TYPE_NAME (type));
  tree t;

  /* We first do the main variant, then copy into any other variants.  */
  type = TYPE_MAIN_VARIANT (type);

  /* Avoid infinite recursion.  */
  if (TYPE_SIZES_GIMPLIFIED (type))
    return;

  TYPE_SIZES_GIMPLIFIED (type) = 1;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
      gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);

      for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
        {
          TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
          TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
        }
      break;

    case ARRAY_TYPE:
      /* These types may not have declarations, so handle them here.  */
      gimplify_type_sizes (TREE_TYPE (type), list_p);
      gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
      /* Ensure VLA bounds aren't removed, for -O0 they should be variables
         with assigned stack slots, for -O1+ -g they should be tracked
         by VTA.  */
      if (!ignored_p
          && TYPE_DOMAIN (type)
          && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
        {
          t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
          if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
            DECL_IGNORED_P (t) = 0;
          t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
          if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
            DECL_IGNORED_P (t) = 0;
        }
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
        if (TREE_CODE (field) == FIELD_DECL)
          {
            gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
            /* Likewise, ensure variable offsets aren't removed.  */
            if (!ignored_p
                && (t = DECL_FIELD_OFFSET (field))
                && VAR_P (t)
                && DECL_ARTIFICIAL (t))
              DECL_IGNORED_P (t) = 0;
            gimplify_one_sizepos (&DECL_SIZE (field), list_p);
            gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
            gimplify_type_sizes (TREE_TYPE (field), list_p);
          }
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* We used to recurse on the pointed-to type here, which turned out to
         be incorrect because its definition might refer to variables not
         yet initialized at this point if a forward declaration is involved.

         It was actually useful for anonymous pointed-to types to ensure
         that the sizes evaluation dominates every possible later use of the
         values.  Restricting to such types here would be safe since there
         is no possible forward declaration around, but would introduce an
         undesirable middle-end semantic to anonymity.  We then defer to
         front-ends the responsibility of ensuring that the sizes are
         evaluated both early and late enough, e.g. by attaching artificial
         type declarations to the tree.  */
      break;

    default:
      break;
    }

  gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
  gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);

  for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
    {
      TYPE_SIZE (t) = TYPE_SIZE (type);
      TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
      TYPE_SIZES_GIMPLIFIED (t) = 1;
    }
}

/* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
   a size or position, has had all of its SAVE_EXPRs evaluated.
   We add any required statements to *STMT_P.  */

void
gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
{
  tree expr = *expr_p;

  /* We don't do anything if the value isn't there, is constant, or contains
     a PLACEHOLDER_EXPR.  We also don't want to do anything if it's already
     a VAR_DECL.  If it's a VAR_DECL from another function, the gimplifier
     would want to replace it with a new variable, but that would cause
     problems because this type came from outside the function; such a
     VAR_DECL is fine to leave as-is here.  */
  if (expr == NULL_TREE
      || is_gimple_constant (expr)
      || TREE_CODE (expr) == VAR_DECL
      || CONTAINS_PLACEHOLDER_P (expr))
    return;

  *expr_p = unshare_expr (expr);

  /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
     if the def vanishes.  */
  gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);

  /* If expr wasn't already is_gimple_sizepos or is_gimple_constant from the
     FE, ensure that it is a VAR_DECL, otherwise we might handle some decls
     as gimplify_vla_decl even when they would have all sizes INTEGER_CSTs.  */
  if (is_gimple_constant (*expr_p))
    *expr_p = get_initialized_tmp_var (*expr_p, stmt_p, NULL, false);
}

/* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
   containing the sequence of corresponding GIMPLE statements.  If DO_PARMS
   is true, also gimplify the parameters.  */

gbind *
gimplify_body (tree fndecl, bool do_parms)
{
  location_t saved_location = input_location;
  gimple_seq parm_stmts, parm_cleanup = NULL, seq;
  gimple *outer_stmt;
  gbind *outer_bind;

  timevar_push (TV_TREE_GIMPLIFY);

  init_tree_ssa (cfun);

  /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
     gimplification.  */
  default_rtl_profile ();

  gcc_assert (gimplify_ctxp == NULL);
  push_gimplify_context (true);

  if (flag_openacc || flag_openmp)
    {
      gcc_assert (gimplify_omp_ctxp == NULL);
      if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
        gimplify_omp_ctxp = new_omp_context (ORT_IMPLICIT_TARGET);
    }

  /* Unshare most shared trees in the body and in that of any nested functions.
     It would seem we don't have to do this for nested functions because
     they are supposed to be output and then the outer function gimplified
     first, but the g++ front end doesn't always do it that way.  */
  unshare_body (fndecl);
  unvisit_body (fndecl);

  /* Make sure input_location isn't set to something weird.  */
  input_location = DECL_SOURCE_LOCATION (fndecl);

  /* Resolve callee-copies.  This has to be done before processing
     the body so that DECL_VALUE_EXPR gets processed correctly.  */
  parm_stmts = do_parms ? gimplify_parameters (&parm_cleanup) : NULL;

  /* Gimplify the function's body.  */
  seq = NULL;
  gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
  outer_stmt = gimple_seq_first_nondebug_stmt (seq);
  if (!outer_stmt)
    {
      outer_stmt = gimple_build_nop ();
      gimplify_seq_add_stmt (&seq, outer_stmt);
    }

  /* The body must contain exactly one statement, a GIMPLE_BIND.  If this is
     not the case, wrap everything in a GIMPLE_BIND to make it so.  */
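  /* Sketch of the invariant established here: on return the body is

         gimple_bind < ... gimplified statements ... >

     either because gimplification already produced a single outer
     GIMPLE_BIND (possibly surrounded only by debug stmts, which are
     moved inside below), or because we wrap the whole sequence in a
     fresh bind in the else arm.  */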
  if (gimple_code (outer_stmt) == GIMPLE_BIND
      && (gimple_seq_first_nondebug_stmt (seq)
          == gimple_seq_last_nondebug_stmt (seq)))
    {
      outer_bind = as_a <gbind *> (outer_stmt);
      if (gimple_seq_first_stmt (seq) != outer_stmt
          || gimple_seq_last_stmt (seq) != outer_stmt)
        {
          /* If there are debug stmts before or after outer_stmt, move them
             inside of outer_bind body.  */
          gimple_stmt_iterator gsi = gsi_for_stmt (outer_stmt, &seq);
          gimple_seq second_seq = NULL;
          if (gimple_seq_first_stmt (seq) != outer_stmt
              && gimple_seq_last_stmt (seq) != outer_stmt)
            {
              second_seq = gsi_split_seq_after (gsi);
              gsi_remove (&gsi, false);
            }
          else if (gimple_seq_first_stmt (seq) != outer_stmt)
            gsi_remove (&gsi, false);
          else
            {
              gsi_remove (&gsi, false);
              second_seq = seq;
              seq = NULL;
            }
          gimple_seq_add_seq_without_update (&seq,
                                             gimple_bind_body (outer_bind));
          gimple_seq_add_seq_without_update (&seq, second_seq);
          gimple_bind_set_body (outer_bind, seq);
        }
    }
  else
    outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);

  DECL_SAVED_TREE (fndecl) = NULL_TREE;

  /* If we had callee-copies statements, insert them at the beginning
     of the function and clear DECL_VALUE_EXPR_P on the parameters.  */
  if (!gimple_seq_empty_p (parm_stmts))
    {
      tree parm;

      gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
      if (parm_cleanup)
        {
          gtry *g = gimple_build_try (parm_stmts, parm_cleanup,
                                      GIMPLE_TRY_FINALLY);
          parm_stmts = NULL;
          gimple_seq_add_stmt (&parm_stmts, g);
        }
      gimple_bind_set_body (outer_bind, parm_stmts);

      for (parm = DECL_ARGUMENTS (current_function_decl);
           parm; parm = DECL_CHAIN (parm))
        if (DECL_HAS_VALUE_EXPR_P (parm))
          {
            DECL_HAS_VALUE_EXPR_P (parm) = 0;
            DECL_IGNORED_P (parm) = 0;
          }
    }

  if ((flag_openacc || flag_openmp || flag_openmp_simd)
      && gimplify_omp_ctxp)
    {
      delete_omp_context (gimplify_omp_ctxp);
      gimplify_omp_ctxp = NULL;
    }

  pop_gimplify_context (outer_bind);
  gcc_assert (gimplify_ctxp == NULL);

  if (flag_checking && !seen_error ())
    verify_gimple_in_seq (gimple_bind_body (outer_bind));

  timevar_pop (TV_TREE_GIMPLIFY);
  input_location = saved_location;

  return outer_bind;
}

typedef char *char_p; /* For DEF_VEC_P.  */

/* Return whether we should exclude FNDECL from instrumentation.  */

static bool
flag_instrument_functions_exclude_p (tree fndecl)
{
  vec<char_p> *v;

  v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
  if (v && v->length () > 0)
    {
      const char *name;
      int i;
      char *s;

      name = lang_hooks.decl_printable_name (fndecl, 1);
      FOR_EACH_VEC_ELT (*v, i, s)
        if (strstr (name, s) != NULL)
          return true;
    }

  v = (vec<char_p> *) flag_instrument_functions_exclude_files;
  if (v && v->length () > 0)
    {
      const char *name;
      int i;
      char *s;

      name = DECL_SOURCE_FILE (fndecl);
      FOR_EACH_VEC_ELT (*v, i, s)
        if (strstr (name, s) != NULL)
          return true;
    }

  return false;
}

/* Entry point to the gimplification pass.  FNDECL is the FUNCTION_DECL
   node for the function we want to gimplify.

   Return the sequence of GIMPLE statements corresponding to the body
   of FNDECL.  */

void
gimplify_function_tree (tree fndecl)
{
  gimple_seq seq;
  gbind *bind;

  gcc_assert (!gimple_body (fndecl));

  if (DECL_STRUCT_FUNCTION (fndecl))
    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
  else
    push_struct_function (fndecl);

  /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
     if necessary.  */
  cfun->curr_properties |= PROP_gimple_lva;

  if (asan_sanitize_use_after_scope ())
    asan_poisoned_variables = new hash_set<tree> ();
  bind = gimplify_body (fndecl, true);
  if (asan_poisoned_variables)
    {
      delete asan_poisoned_variables;
      asan_poisoned_variables = NULL;
    }

  /* The tree body of the function is no longer needed, replace it
     with the new GIMPLE body.  */
  seq = NULL;
  gimple_seq_add_stmt (&seq, bind);
  gimple_set_body (fndecl, seq);

  /* If we're instrumenting function entry/exit, then prepend the call to
     the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
     catch the exit hook.  */
  /* ??? Add some way to ignore exceptions for this TFE.  */
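  /* Sketch of the shape emitted below for -finstrument-functions
     (hook names are the real builtins; temporaries are invented):

         t1 = __builtin_return_address (0);
         __cyg_profile_func_enter (&this_fn, t1);
         try
           {
             ... original body ...
           }
         finally
           {
             t2 = __builtin_return_address (0);
             __cyg_profile_func_exit (&this_fn, t2);
           }  */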
  if (flag_instrument_function_entry_exit
      && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
      /* Do not instrument extern inline functions.  */
      && !(DECL_DECLARED_INLINE_P (fndecl)
           && DECL_EXTERNAL (fndecl)
           && DECL_DISREGARD_INLINE_LIMITS (fndecl))
      && !flag_instrument_functions_exclude_p (fndecl))
    {
      tree x;
      gbind *new_bind;
      gimple *tf;
      gimple_seq cleanup = NULL, body = NULL;
      tree tmp_var, this_fn_addr;
      gcall *call;

      /* The instrumentation hooks aren't going to call the instrumented
         function and the address they receive is expected to be matchable
         against symbol addresses.  Make sure we don't create a trampoline,
         in case the current function is nested.  */
      this_fn_addr = build_fold_addr_expr (current_function_decl);
      TREE_NO_TRAMPOLINE (this_fn_addr) = 1;

      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
      call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);

      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&body, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
      call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
      gimplify_seq_add_stmt (&body, call);
      gimplify_seq_add_stmt (&body, tf);
      new_bind = gimple_build_bind (NULL, body, NULL);

      /* Replace the current function body with the body
         wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
      bind = new_bind;
    }

  if (sanitize_flags_p (SANITIZE_THREAD)
      && param_tsan_instrument_func_entry_exit)
    {
      gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
      gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
      gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
      /* Replace the current function body with the body
         wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
    }

  DECL_SAVED_TREE (fndecl) = NULL_TREE;
  cfun->curr_properties |= PROP_gimple_any;

  pop_cfun ();

  dump_function (TDI_gimple, fndecl);
}

/* Return a dummy expression of type TYPE in order to keep going after an
   error.  */

static tree
dummy_object (tree type)
{
  tree t = build_int_cst (build_pointer_type (type), 0);
  return build2 (MEM_REF, type, t, t);
}

/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
                      gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t, tag, aptag;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);
  if (have_va_type == NULL_TREE
      && POINTER_TYPE_P (TREE_TYPE (valist)))
    /* Handle 'Case 1: Not an array type' from c-common.cc/build_va_arg.  */
    have_va_type
      = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
  gcc_assert (have_va_type != NULL_TREE);

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
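  /* E.g. (hypothetical user code)

         short s = va_arg (ap, short);

     is undefined because the caller's "short" argument was promoted to
     "int"; the code below warns, evaluates the va_list for its side
     effects, and replaces the access with a trap plus a dummy value.  */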
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
      != type)
    {
      static bool gave_help;
      bool warned;
      /* Use the expansion point to handle cases such as passing bool (defined
         in a system header) through `...'.  */
      location_t xloc
        = expansion_point_location_if_in_system_header (loc);

      /* Unfortunately, this is merely undefined, rather than a constraint
         violation, so we cannot make this an error.  If this call is never
         executed, the program is still strictly conforming.  */
      auto_diagnostic_group d;
      warned = warning_at (xloc, 0,
                           "%qT is promoted to %qT when passed through %<...%>",
                           type, promoted_type);
      if (!gave_help && warned)
        {
          gave_help = true;
          inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
                  promoted_type, type);
        }

      /* We can, however, treat "undefined" any way we please.
         Call abort to encourage the user to fix the program.  */
      if (warned)
        inform (xloc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
         expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
                               builtin_decl_implicit (BUILT_IN_TRAP), 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
         mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }

  tag = build_int_cst (build_pointer_type (type), 0);
  aptag = build_int_cst (TREE_TYPE (valist), 0);

  *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
                                          valist, tag, aptag);

  /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
     needs to be expanded.  */
  cfun->curr_properties &= ~PROP_gimple_lva;

  return GS_OK;
}

/* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.

   DST/SRC are the destination and source respectively.  You can pass
   ungimplified trees in DST or SRC, in which case they will be
   converted to a gimple operand if necessary.

   This function returns the newly created GIMPLE_ASSIGN tuple.  */
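/* Minimal usage sketch (caller-side, names hypothetical):

       gimple_seq seq = NULL;
       gimplify_assign (dst, build2 (PLUS_EXPR, type, a, b), &seq);

   afterwards SEQ ends with the tuple "dst = a + b", preceded by any
   statements needed to gimplify the operands.  */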

gimple *
gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
{
  tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
  gimplify_and_add (t, seq_p);
  ggc_free (t);
  return gimple_seq_last_stmt (*seq_p);
}

inline hashval_t
gimplify_hasher::hash (const elt_t *p)
{
  tree t = p->val;
  return iterative_hash_expr (t, 0);
}

inline bool
gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
{
  tree t1 = p1->val;
  tree t2 = p2->val;
  enum tree_code code = TREE_CODE (t1);

  if (TREE_CODE (t2) != code
      || TREE_TYPE (t1) != TREE_TYPE (t2))
    return false;

  if (!operand_equal_p (t1, t2, 0))
    return false;

  /* Only allow them to compare equal if they also hash equal; otherwise
     results are nondeterministic, and we fail bootstrap comparison.  */
  gcc_checking_assert (hash (p1) == hash (p2));

  return true;
}
