/* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
   tree representation into the GIMPLE form.
   Copyright (C) 2002-2017 Free Software Foundation, Inc.
   Major work done by Sebastian Pop <s.pop@laposte.net>,
   Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "gimple-predict.h"
#include "tree-pass.h"		/* FIXME: only for PROP_gimple_any */
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "calls.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-cfg.h"
#include "tree-ssa.h"
#include "omp-general.h"
#include "omp-low.h"
#include "gimple-low.h"
#include "cilk.h"
#include "gomp-constants.h"
#include "tree-dump.h"
#include "gimple-walk.h"
#include "langhooks-def.h"	/* FIXME: for lhd_set_decl_assembler_name */
#include "builtins.h"
#include "asan.h"
#include "dbgcnt.h"

/* Hash set of poisoned variables in a bind expr.  */
static hash_set<tree> *asan_poisoned_variables = NULL;

enum gimplify_omp_var_data
{
  GOVD_SEEN = 1,
  GOVD_EXPLICIT = 2,
  GOVD_SHARED = 4,
  GOVD_PRIVATE = 8,
  GOVD_FIRSTPRIVATE = 16,
  GOVD_LASTPRIVATE = 32,
  GOVD_REDUCTION = 64,
  GOVD_LOCAL = 128,
  GOVD_MAP = 256,
  GOVD_DEBUG_PRIVATE = 512,
  GOVD_PRIVATE_OUTER_REF = 1024,
  GOVD_LINEAR = 2048,
  GOVD_ALIGNED = 4096,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 8192,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 16384,

  GOVD_MAP_0LEN_ARRAY = 32768,

  /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping.  */
  GOVD_MAP_ALWAYS_TO = 65536,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 131072,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 262144,

  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};


enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_SIMD = 0x01,

  ORT_PARALLEL = 0x02,
  ORT_COMBINED_PARALLEL = 0x03,

  ORT_TASK = 0x04,
  ORT_UNTIED_TASK = 0x05,

  ORT_TEAMS = 0x08,
  ORT_COMBINED_TEAMS = 0x09,

  /* Data region.  */
  ORT_TARGET_DATA = 0x10,

  /* Data region with offloading.  */
  ORT_TARGET = 0x20,
  ORT_COMBINED_TARGET = 0x21,

  /* OpenACC variants.  */
  ORT_ACC = 0x40,  /* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,  /* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,  /* Parallel construct */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 0x80,  /* Kernels construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 0x80,  /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x100
};

/* Gimplify hashtable helper.  */

struct gimplify_hasher : free_ptr_hash <elt_t>
{
  static inline hashval_t hash (const elt_t *);
  static inline bool equal (const elt_t *, const elt_t *);
};

struct gimplify_ctx
{
  struct gimplify_ctx *prev_context;

  vec<gbind *> bind_expr_stack;
  tree temps;
  gimple_seq conditional_cleanups;
  tree exit_label;
  tree return_temp;

  vec<tree> case_labels;
  hash_set<tree> *live_switch_vars;
  /* The formal temporary table.  Should this be persistent?  */
  hash_table<gimplify_hasher> *temp_htab;

  int conditions;
  unsigned into_ssa : 1;
  unsigned allow_rhs_cond_expr : 1;
  unsigned in_cleanup_point_expr : 1;
  unsigned keep_stack : 1;
  unsigned save_stack : 1;
  unsigned in_switch_expr : 1;
};

struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;
  splay_tree variables;
  hash_set<tree> *privatized_types;
  /* Iteration variables in an OMP_FOR.  */
  vec<tree> loop_iter_var;
  location_t location;
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
  bool combined_loop;
  bool distribute;
  bool target_map_scalars_firstprivate;
  bool target_map_pointers_as_0len_arrays;
  bool target_firstprivatize_array_bases;
};

static struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;

/* Forward declaration.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
static hash_map<tree, tree> *oacc_declare_returns;
static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
					   bool (*) (tree), fallback_t, bool);

/* Shorter alias name for the above function for use in gimplify.c
   only.  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}
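/* Illustrative sketch (hypothetical caller, not part of this file's logic):
   side effects are accumulated into a local sequence and then spliced into
   the caller's pre-queue without rescanning operands, which is the only
   legal mode before SSA operands exist:

     gimple_seq pre = NULL;
     gimplify_seq_add_stmt (&pre, some_stmt);
     gimplify_seq_add_seq (pre_p, pre);  */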
/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  This function is
   similar to gimple_seq_add_seq, but does not scan the operands.
   During gimplification, we need to manipulate statement sequences
   before the def/use vectors have been constructed.  */

static void
gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  si = gsi_last (*dst_p);
  gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
}


/* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
   and popping gimplify contexts.  */

static struct gimplify_ctx *ctx_pool = NULL;

/* Return a gimplify context struct from the pool.  */

static inline struct gimplify_ctx *
ctx_alloc (void)
{
  struct gimplify_ctx * c = ctx_pool;

  if (c)
    ctx_pool = c->prev_context;
  else
    c = XNEW (struct gimplify_ctx);

  memset (c, '\0', sizeof (*c));
  return c;
}

/* Put gimplify context C back into the pool.  */

static inline void
ctx_free (struct gimplify_ctx *c)
{
  c->prev_context = ctx_pool;
  ctx_pool = c;
}

/* Free allocated ctx stack memory.  */

void
free_gimplify_stack (void)
{
  struct gimplify_ctx *c;

  while ((c = ctx_pool))
    {
      ctx_pool = c->prev_context;
      free (c);
    }
}


/* Set up a context for the gimplifier.  */

void
push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
{
  struct gimplify_ctx *c = ctx_alloc ();

  c->prev_context = gimplify_ctxp;
  gimplify_ctxp = c;
  gimplify_ctxp->into_ssa = in_ssa;
  gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
}

/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple *body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  gcc_assert (c
	      && (!c->bind_expr_stack.exists ()
		  || c->bind_expr_stack.is_empty ()));
  c->bind_expr_stack.release ();
  gimplify_ctxp = c->prev_context;

  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  delete c->temp_htab;
  c->temp_htab = NULL;
  ctx_free (c);
}

/* Push a GIMPLE_BIND tuple onto the stack of bindings.  */

static void
gimple_push_bind_expr (gbind *bind_stmt)
{
  gimplify_ctxp->bind_expr_stack.reserve (8);
  gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
}

/* Pop the first element off the stack of bindings.  */

static void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}

/* Return the first element of the stack of bindings.  */

gbind *
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}

/* Return the stack of bindings created during gimplification.  */

vec<gbind *>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}

/* Return true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}
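/* A typical pairing of the context routines above (illustrative sketch;
   the real callers live in gimplify_body and friends):

     push_gimplify_context ();
     gimplify_stmt (&body, &seq);
     pop_gimplify_context (gimple_seq_first_stmt (seq));

   pop_gimplify_context expects the outermost GIMPLE_BIND of the gimplified
   body, so that the temporaries created in between can be declared in it.  */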
/* Note that we've entered a COND_EXPR.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}

/* Note that we've left a COND_EXPR.  If we're back at unconditional scope
   now, add any conditional cleanups we've seen to the prequeue.  */

static void
gimple_pop_condition (gimple_seq *pre_p)
{
  int conds = --(gimplify_ctxp->conditions);

  gcc_assert (conds >= 0);
  if (conds == 0)
    {
      gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
      gimplify_ctxp->conditional_cleanups = NULL;
    }
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* Create a new omp construct that deals with variable remapping.  */

static struct gimplify_omp_ctx *
new_omp_context (enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *c;

  c = XCNEW (struct gimplify_omp_ctx);
  c->outer_context = gimplify_omp_ctxp;
  c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
  c->privatized_types = new hash_set<tree>;
  c->location = input_location;
  c->region_type = region_type;
  if ((region_type & ORT_TASK) == 0)
    c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  else
    c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;

  return c;
}

/* Destroy an omp construct that deals with variable remapping.  */

static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  delete c->privatized_types;
  c->loop_iter_var.release ();
  XDELETE (c);
}

static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);

/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}

/* Gimplify statement T into sequence *SEQ_P, and return the first
   tuple in the sequence of generated tuples for this statement.
   Return NULL if gimplifying T produced no tuples.  */

static gimple *
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  gimple_stmt_iterator last = gsi_last (*seq_p);

  gimplify_and_add (t, seq_p);

  if (!gsi_end_p (last))
    {
      gsi_next (&last);
      return gsi_stmt (last);
    }
  else
    return gimple_seq_first_stmt (*seq_p);
}

/* Returns true iff T is a valid RHS for an assignment to an un-renamed
   LHS, or for a call argument.  */

static bool
is_gimple_mem_rhs (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return is_gimple_val (t) || is_gimple_lvalue (t);
}
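/* Predicates like the one above are passed to gimplify_expr as its
   gimple_test_f argument, e.g. (illustrative):

     gimplify_expr (&rhs, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);

   which forces RHS into a form the predicate accepts.  */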
/* Return true if T is a CALL_EXPR or an expression that can be
   assigned to a temporary.  Note that this predicate should only be
   used during gimplification.  See the rationale for this in
   gimplify_modify_expr.  */

static bool
is_gimple_reg_rhs_or_call (tree t)
{
  return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
	  || TREE_CODE (t) == CALL_EXPR);
}

/* Return true if T is a valid memory RHS or a CALL_EXPR.  Note that
   this predicate should only be used during gimplification.  See the
   rationale for this in gimplify_modify_expr.  */

static bool
is_gimple_mem_rhs_or_call (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return (is_gimple_val (t)
	    || is_gimple_lvalue (t)
	    || TREE_CLOBBER_P (t)
	    || TREE_CODE (t) == CALL_EXPR);
}

/* Create a temporary with a name derived from VAL.  Subroutine of
   lookup_tmp_var; nobody else should call this function.  */

static inline tree
create_tmp_from_val (tree val)
{
  /* Drop all qualifiers and address-space information from the value type.  */
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
  tree var = create_tmp_var (type, get_name (val));
  if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
      || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (var) = 1;
  return var;
}

/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary.  */

static tree
lookup_tmp_var (tree val, bool is_formal)
{
  tree ret;

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    ret = create_tmp_from_val (val);
  else
    {
      elt_t elt, *elt_p;
      elt_t **slot;

      elt.val = val;
      if (!gimplify_ctxp->temp_htab)
	gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
      slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
      if (*slot == NULL)
	{
	  elt_p = XNEW (elt_t);
	  elt_p->val = val;
	  elt_p->temp = ret = create_tmp_from_val (val);
	  *slot = elt_p;
	}
      else
	{
	  elt_p = *slot;
	  ret = elt_p->temp;
	}
    }

  return ret;
}
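/* Illustrative effect of the reuse: with optimization on, gimplifying the
   same formal expression twice yields one shared temporary,

     t.1 = a + b;
     ... t.1 ... t.1 ...

   instead of two identical temporaries (a sketch; the exact names differ).  */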
/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
		      bool is_formal, bool allow_ssa)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  if (allow_ssa
      && gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    {
      t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
      if (! gimple_in_ssa_p (cfun))
	{
	  const char *name = get_name (val);
	  if (name)
	    SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
	}
    }
  else
    t = lookup_tmp_var (val, is_formal);

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  ggc_free (mod);

  return t;
}

/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
      change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  return internal_get_tmp_var (val, pre_p, NULL, true, true);
}

/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
			 bool allow_ssa)
{
  return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa);
}

/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  */

void
declare_vars (tree vars, gimple *gs, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gbind *scope = as_a <gbind *> (gs);

      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
				    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}

/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
   for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
   no such upper bound can be obtained.  */

static void
force_constant_size (tree var)
{
  /* The only attempt we make is by querying the maximum size of objects
     of the variable's type.  */

  HOST_WIDE_INT max_size;

  gcc_assert (VAR_P (var));

  max_size = max_int_size_in_bytes (TREE_TYPE (var));

  gcc_assert (max_size >= 0);

  DECL_SIZE_UNIT (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
  DECL_SIZE (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var_fn (struct function *fn, tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = fn->decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  record_vars_into (tmp, fn->decl);
}

/* Push the temporary variable TMP into the current binding.  */
void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    ctx = ctx->outer_context;
	  if (ctx)
	    omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}



/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
   nodes that are referenced more than once in GENERIC functions.  This is
   necessary because gimplification (translation into GIMPLE) is performed
   by modifying tree nodes in-place, so gimplification of a shared node in a
   first context could generate an invalid GIMPLE form in a second context.

   This is achieved with a simple mark/copy/unmark algorithm that walks the
   GENERIC representation top-down, marks nodes with TREE_VISITED the first
   time it encounters them, duplicates them if they already have TREE_VISITED
   set, and finally removes the TREE_VISITED marks it has set.

   The algorithm works only at the function level, i.e. it generates a GENERIC
   representation of a function with no nodes shared within the function when
   passed a GENERIC function (except for nodes that are allowed to be shared).

   At the global level, it is also necessary to unshare tree nodes that are
   referenced in more than one function, for the same aforementioned reason.
   This requires some cooperation from the front-end.  There are 2 strategies:

     1. Manual unsharing.  The front-end needs to call unshare_expr on every
        expression that might end up being shared across functions.

     2. Deep unsharing.  This is an extension of regular unsharing.  Instead
        of calling unshare_expr on expressions that might be shared across
        functions, the front-end pre-marks them with TREE_VISITED.  This will
        ensure that they are unshared on the first reference within functions
        when the regular unsharing algorithm runs.  The counterpart is that
        this algorithm must look deeper than for manual unsharing, which is
        specified by LANG_HOOKS_DEEP_UNSHARING.

   If there are only few specific cases of node sharing across functions, it is
   probably easier for a front-end to unshare the expressions manually.  On the
   contrary, if the expressions generated at the global level are as widespread
   as expressions generated within functions, deep unsharing is very likely the
   way to go.  */
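/* A concrete instance of the problem described above (hypothetical GENERIC):
   if one ARRAY_REF node is reachable from two statements, gimplifying the
   first statement rewrites that node in place, and the second statement
   would then see already-gimplified operands.  Unsharing first gives each
   statement its own copy.  */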
/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      if (data && !((hash_set<tree> *)data)->add (t))
	;
      else
	*walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
	   || TREE_CODE_CLASS (code) == tcc_declaration
	   || TREE_CODE_CLASS (code) == tcc_constant
	   /* We can't do anything sensible with a BLOCK used as an
	      expression, but we also can't just die when we see it
	      because of non-expression uses.  So we avert our eyes
	      and cross our fingers.  Silly Java.  */
	   || code == BLOCK)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}

/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}

/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
   copy_if_shared_r callback unmodified.  */

static inline void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}

/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
   any nested functions.  */

static void
unshare_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  /* If the language requires deep unsharing, we need a pointer set to make
     sure we don't repeatedly unshare subtrees of unshareable nodes.  */
  hash_set<tree> *visited
    = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;
  copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
  copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
  copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);

  delete visited;

  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unshare_body (cgn->decl);
}

/* Callback for walk_tree to unmark the visited trees rooted at *TP.
   Subtrees are walked until the first unvisited node is encountered.  */

static tree
unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  /* If this node has been visited, unmark it and keep looking.  */
  if (TREE_VISITED (t))
    TREE_VISITED (t) = 0;

  /* Otherwise, don't look any deeper.  */
  else
    *walk_subtrees = 0;

  return NULL_TREE;
}

/* Unmark the visited trees rooted at *TP.  */

static inline void
unmark_visited (tree *tp)
{
  walk_tree (tp, unmark_visited_r, NULL, NULL);
}

/* Likewise, but mark all trees as not visited.  */

static void
unvisit_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);

  unmark_visited (&DECL_SAVED_TREE (fndecl));
  unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
  unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));

  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unvisit_body (cgn->decl);
}

/* Unconditionally make an unshared copy of EXPR.  This is used when using
   stored expressions which span multiple functions, such as BINFO_VTABLE,
   as the normal unsharing process can't tell that they're shared.  */

tree
unshare_expr (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}

/* Worker for unshare_expr_without_location.  */

static tree
prune_expr_location (tree *tp, int *walk_subtrees, void *)
{
  if (EXPR_P (*tp))
    SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
  else
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Similar to unshare_expr but also prune all expression locations
   from EXPR.  */

tree
unshare_expr_without_location (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  if (EXPR_P (expr))
    walk_tree (&expr, prune_expr_location, NULL, NULL);
  return expr;
}
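/* Typical use of unshare_expr (illustrative): copy a stored tree before
   handing it to the gimplifier,

     tree init = unshare_expr (DECL_INITIAL (decl));

   so that in-place gimplification cannot corrupt the stored original.  */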
/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    case TRANSACTION_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TRANSACTION_EXPR_BODY (*p);
	      break;

	    default:
	      /* Assume that any tree upon which voidify_wrapper_expr is
		 directly called is a wrapper, and that its body is op0.  */
	      if (p == &wrapper)
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		  p = &TREE_OPERAND (*p, 0);
		  break;
		}
	      goto out;
	    }
	}

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}

/* Prepare calls to builtins to SAVE and RESTORE the stack as well as
   a temporary through which they communicate.  */

static void
build_stack_save_restore (gcall **save, gcall **restore)
{
  tree tmp_var;

  *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
  tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
  gimple_call_set_lhs (*save, tmp_var);

  *restore
    = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
			 1, tmp_var);
}

/* Generate an IFN_ASAN_MARK call that poisons the shadow memory of
   variable DECL.  */

static tree
build_asan_poison_call_expr (tree decl)
{
  /* Do not poison variables that have size equal to zero.  */
  tree unit_size = DECL_SIZE_UNIT (decl);
  if (zerop (unit_size))
    return NULL_TREE;

  tree base = build_fold_addr_expr (decl);

  return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
				       void_type_node, 3,
				       build_int_cst (integer_type_node,
						      ASAN_MARK_POISON),
				       base, unit_size);
}
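/* In the dumps the built call reads roughly as (illustrative):

     ASAN_MARK (POISON, &x, 8);

   and is later expanded by the sanitizer pass into the actual shadow
   memory stores.  */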
/* Generate an IFN_ASAN_MARK call that poisons or unpoisons, depending on
   the POISON flag, the shadow memory of variable DECL.  The call is
   inserted at the position identified by iterator IT; BEFORE selects
   whether it goes before or after that position.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
		      bool before)
{
  tree unit_size = DECL_SIZE_UNIT (decl);
  tree base = build_fold_addr_expr (decl);

  /* Do not poison variables that have size equal to zero.  */
  if (zerop (unit_size))
    return;

  /* It's necessary to have all stack variables aligned to ASAN granularity
     bytes.  */
  if (DECL_ALIGN_UNIT (decl) <= ASAN_SHADOW_GRANULARITY)
    SET_DECL_ALIGN (decl, BITS_PER_UNIT * ASAN_SHADOW_GRANULARITY);

  HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;

  gimple *g
    = gimple_build_call_internal (IFN_ASAN_MARK, 3,
				  build_int_cst (integer_type_node, flags),
				  base, unit_size);

  if (before)
    gsi_insert_before (it, g, GSI_NEW_STMT);
  else
    gsi_insert_after (it, g, GSI_NEW_STMT);
}

/* Generate an IFN_ASAN_MARK internal call that, depending on the POISON
   flag, either poisons or unpoisons DECL.  The created statement is
   appended to the SEQ_P gimple sequence.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
{
  gimple_stmt_iterator it = gsi_last (*seq_p);
  bool before = false;

  if (gsi_end_p (it))
    before = true;

  asan_poison_variable (decl, poison, &it, before);
}

/* Sort pair of VAR_DECLs A and B by DECL_UID.  */

static int
sort_by_decl_uid (const void *a, const void *b)
{
  const tree *t1 = (const tree *)a;
  const tree *t2 = (const tree *)b;

  int uid1 = DECL_UID (*t1);
  int uid2 = DECL_UID (*t2);

  if (uid1 < uid2)
    return -1;
  else if (uid1 > uid2)
    return 1;
  else
    return 0;
}

/* Generate IFN_ASAN_MARK internal calls for all VARIABLES, depending on
   the POISON flag.  The created statements are appended to the SEQ_P
   gimple sequence.  */

static void
asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
{
  unsigned c = variables->elements ();
  if (c == 0)
    return;

  auto_vec<tree> sorted_variables (c);

  for (hash_set<tree>::iterator it = variables->begin ();
       it != variables->end (); ++it)
    sorted_variables.safe_push (*it);

  sorted_variables.qsort (sort_by_decl_uid);

  unsigned i;
  tree var;
  FOR_EACH_VEC_ELT (sorted_variables, i, var)
    {
      asan_poison_variable (var, poison, seq_p);

      /* Add the use_after_scope_memory attribute to the variable in order
	 to prevent it from being rewritten into SSA.  */
      if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
			     DECL_ATTRIBUTES (var)))
	DECL_ATTRIBUTES (var)
	  = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
		       integer_one_node,
		       DECL_ATTRIBUTES (var));
    }
}

/* Gimplify a BIND_EXPR.  Just voidify and recurse.  */

static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_keep_stack = gimplify_ctxp->keep_stack;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gbind *bind_stmt;
  gimple_seq body, cleanup;
  gcall *stack_save;
  location_t start_locus = 0, end_locus = 0;
  tree ret_clauses = NULL;

  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t))
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;

	  /* Mark variable as local.  */
	  if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t)
	      && (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL))
	    {
	      if (ctx->region_type == ORT_SIMD
		  && TREE_ADDRESSABLE (t)
		  && !TREE_STATIC (t))
		omp_add_variable (ctx, t, GOVD_PRIVATE | GOVD_SEEN);
	      else
		omp_add_variable (ctx, t, GOVD_LOCAL | GOVD_SEEN);
	    }

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}

      /* Preliminarily mark non-addressed complex variables as eligible
	 for promotion to gimple registers.  We'll transform their uses
	 as we find them.  */
      if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	  && !TREE_THIS_VOLATILE (t)
	  && (VAR_P (t) && !DECL_HARD_REGISTER (t))
	  && !needs_to_live_in_memory (t))
	DECL_GIMPLE_REG_P (t) = 1;
    }

  bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
				 BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (bind_stmt);

  gimplify_ctxp->keep_stack = false;
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (bind_stmt, body);

  /* Source location wise, the cleanup code (stack_restore and clobbers)
     belongs to the end of the block, so propagate what we have.  The
     stack_save operation belongs to the beginning of block, which we can
     infer from the bind_expr directly if the block has no explicit
     assignment.  */
  if (BIND_EXPR_BLOCK (bind_expr))
    {
      end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
      start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
    }
  if (start_locus == 0)
    start_locus = EXPR_LOCATION (bind_expr);

  cleanup = NULL;
  stack_save = NULL;

  /* If the code both contains VLAs and calls alloca, then we cannot reclaim
     the stack space allocated to the VLAs.  */
  if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
    {
      gcall *stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimple_set_location (stack_save, start_locus);
      gimple_set_location (stack_restore, end_locus);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t)
	  && !is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl)
	{
	  if (!DECL_HARD_REGISTER (t)
	      && !TREE_THIS_VOLATILE (t)
	      && !DECL_HAS_VALUE_EXPR_P (t)
	      /* Only care for variables that have to be in memory.  Others
		 will be rewritten into SSA names, hence moved to the
		 top-level.  */
	      && !is_gimple_reg (t)
	      && flag_stack_reuse != SR_NONE)
	    {
	      tree clobber = build_constructor (TREE_TYPE (t), NULL);
	      gimple *clobber_stmt;
	      TREE_THIS_VOLATILE (clobber) = 1;
	      clobber_stmt = gimple_build_assign (t, clobber);
	      gimple_set_location (clobber_stmt, end_locus);
	      gimplify_seq_add_stmt (&cleanup, clobber_stmt);
	    }

	  if (flag_openacc && oacc_declare_returns != NULL)
	    {
	      tree *c = oacc_declare_returns->get (t);
	      if (c != NULL)
		{
		  if (ret_clauses)
		    OMP_CLAUSE_CHAIN (*c) = ret_clauses;

		  ret_clauses = *c;

		  oacc_declare_returns->remove (t);

		  if (oacc_declare_returns->elements () == 0)
		    {
		      delete oacc_declare_returns;
		      oacc_declare_returns = NULL;
		    }
		}
	    }
	}

      if (asan_poisoned_variables != NULL
	  && asan_poisoned_variables->contains (t))
	{
	  asan_poisoned_variables->remove (t);
	  asan_poison_variable (t, true, &cleanup);
	}

      if (gimplify_ctxp->live_switch_vars != NULL
	  && gimplify_ctxp->live_switch_vars->contains (t))
	gimplify_ctxp->live_switch_vars->remove (t);
    }

  if (ret_clauses)
    {
      gomp_target *stmt;
      gimple_stmt_iterator si = gsi_start (cleanup);

      stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
				      ret_clauses);
      gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
    }

  if (cleanup)
    {
      gtry *gs;
      gimple_seq new_body;

      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
			     GIMPLE_TRY_FINALLY);

      if (stack_save)
	gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (bind_stmt, new_body);
    }

  /* keep_stack propagates all the way up to the outermost BIND_EXPR.  */
  if (!gimplify_ctxp->keep_stack)
    gimplify_ctxp->keep_stack = old_keep_stack;
  gimplify_ctxp->save_stack = old_save_stack;

  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, bind_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
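/* A clobber added above shows up in the GIMPLE dump as (illustrative):

     x ={v} {CLOBBER};

   marking the point after which X's stack slot may be reused.  */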
/* Gimplify a RETURN_EXPR.  If the expression to be returned is not a
   GIMPLE value, it is assigned to a new temporary and the statement is
   re-written to return the temporary.

   PRE_P points to the sequence where side effects that must happen before
   STMT should be stored.  */

static enum gimplify_status
gimplify_return_expr (tree stmt, gimple_seq *pre_p)
{
  greturn *ret;
  tree ret_expr = TREE_OPERAND (stmt, 0);
  tree result_decl, result;

  if (ret_expr == error_mark_node)
    return GS_ERROR;

  /* Implicit _Cilk_sync must be inserted right before any return statement
     if there is a _Cilk_spawn in the function.  If the user has provided a
     _Cilk_sync, the optimizer should remove this duplicate one.  */
  if (fn_contains_cilk_spawn_p (cfun))
    {
      tree impl_sync = build0 (CILK_SYNC_STMT, void_type_node);
      gimplify_and_add (impl_sync, pre_p);
    }

  if (!ret_expr
      || TREE_CODE (ret_expr) == RESULT_DECL
      || ret_expr == error_mark_node)
    {
      greturn *ret = gimple_build_return (ret_expr);
      gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
      gimplify_seq_add_stmt (pre_p, ret);
      return GS_ALL_DONE;
    }

  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
    result_decl = NULL_TREE;
  else
    {
      result_decl = TREE_OPERAND (ret_expr, 0);

      /* See through a return by reference.  */
      if (TREE_CODE (result_decl) == INDIRECT_REF)
	result_decl = TREE_OPERAND (result_decl, 0);

      gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
		   || TREE_CODE (ret_expr) == INIT_EXPR)
		  && TREE_CODE (result_decl) == RESULT_DECL);
    }

  /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
     Recall that aggregate_value_p is FALSE for any aggregate type that is
     returned in registers.  If we're returning values in registers, then
     we don't want to extend the lifetime of the RESULT_DECL, particularly
     across another call.  In addition, for those aggregates for which
     hard_function_value generates a PARALLEL, we'll die during normal
     expansion of structure assignments; there's special code in expand_return
     to handle this case that does not exist in expand_expr.  */
  if (!result_decl)
    result = NULL_TREE;
  else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
    {
      if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
	    gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
	  /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
	     should be effectively allocated by the caller, i.e. all calls to
	     this function must be subject to the Return Slot Optimization.  */
	  gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
	  gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
	}
      result = result_decl;
    }
  else if (gimplify_ctxp->return_temp)
    result = gimplify_ctxp->return_temp;
  else
    {
      result = create_tmp_reg (TREE_TYPE (result_decl));

      /* ??? With complex control flow (usually involving abnormal edges),
	 we can wind up warning about an uninitialized value for this.  Due
	 to how this variable is constructed and initialized, this is never
	 true.  Give up and never warn.  */
      TREE_NO_WARNING (result) = 1;

      gimplify_ctxp->return_temp = result;
    }

  /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
     Then gimplify the whole thing.  */
  if (result != result_decl)
    TREE_OPERAND (ret_expr, 0) = result;

  gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);

  ret = gimple_build_return (result);
  gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
  gimplify_seq_add_stmt (pre_p, ret);

  return GS_ALL_DONE;
}
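/* Illustrative result for a register return value: "return a + b;" is
   rewritten as

     D.1 = a + b;
     return D.1;

   with the same temporary (gimplify_ctxp->return_temp) reused by every
   return statement in the function (a sketch; exact names differ).  */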
/* Gimplify a variable-length array DECL.  */

static void
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
{
  /* This is a variable-sized decl.  Simplify its size and mark it
     for deferred expansion.  */
  tree t, addr, ptr_type;

  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);

  /* Don't mess with a DECL_VALUE_EXPR set by the front-end.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    return;

  /* All occurrences of this decl in final gimplified code will be
     replaced by indirection.  Setting DECL_VALUE_EXPR does two
     things: First, it lets the rest of the gimplifier know what
     replacement to use.  Second, it lets the debug info know
     where to find the value.  */
  ptr_type = build_pointer_type (TREE_TYPE (decl));
  addr = create_tmp_var (ptr_type, get_name (decl));
  DECL_IGNORED_P (addr) = 0;
  t = build_fold_indirect_ref (addr);
  TREE_THIS_NOTRAP (t) = 1;
  SET_DECL_VALUE_EXPR (decl, t);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;

  t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
  t = build_call_expr (t, 2, DECL_SIZE_UNIT (decl),
		       size_int (DECL_ALIGN (decl)));
  /* The call has been built for a variable-sized object.  */
  CALL_ALLOCA_FOR_VAR_P (t) = 1;
  t = fold_convert (ptr_type, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);

  gimplify_and_add (t, seq_p);
}

/* A helper function to be called via walk_tree.  Mark all labels under *TP
   as being forced.  To be called for DECL_INITIAL of static variables.  */

static tree
force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  if (TYPE_P (*tp))
    *walk_subtrees = 0;
  if (TREE_CODE (*tp) == LABEL_DECL)
    {
      FORCED_LABEL (*tp) = 1;
      cfun->has_forced_label_in_static = 1;
    }

  return NULL_TREE;
}
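/* After gimplify_vla_decl, a declaration such as "char buf[n];" behaves
   as if written (sketch):

     char *buf.1 = __builtin_alloca_with_align (n, align);

   and every later use of BUF is rewritten through *buf.1 by way of its
   DECL_VALUE_EXPR.  */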
/* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
   and initialization explicit.  */

static enum gimplify_status
gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
{
  tree stmt = *stmt_p;
  tree decl = DECL_EXPR_DECL (stmt);

  *stmt_p = NULL_TREE;

  if (TREE_TYPE (decl) == error_mark_node)
    return GS_ERROR;

  if ((TREE_CODE (decl) == TYPE_DECL
       || VAR_P (decl))
      && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
    {
      gimplify_type_sizes (TREE_TYPE (decl), seq_p);
      if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
	gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
    }

  /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
     in case its size expressions contain problematic nodes like CALL_EXPR.  */
  if (TREE_CODE (decl) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (decl)
      && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
    {
      gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
      if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
	gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
    }

  if (VAR_P (decl) && !DECL_EXTERNAL (decl))
    {
      tree init = DECL_INITIAL (decl);
      bool is_vla = false;

      if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  || (!TREE_STATIC (decl)
	      && flag_stack_check == GENERIC_STACK_CHECK
	      && compare_tree_int (DECL_SIZE_UNIT (decl),
				   STACK_CHECK_MAX_VAR_SIZE) > 0))
	{
	  gimplify_vla_decl (decl, seq_p);
	  is_vla = true;
	}

      if (asan_poisoned_variables
	  && !is_vla
	  && TREE_ADDRESSABLE (decl)
	  && !TREE_STATIC (decl)
	  && !DECL_HAS_VALUE_EXPR_P (decl)
	  && dbg_cnt (asan_use_after_scope)
	  && !gimplify_omp_ctxp)
	{
	  asan_poisoned_variables->add (decl);
	  asan_poison_variable (decl, false, seq_p);
	  if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
	    gimplify_ctxp->live_switch_vars->add (decl);
	}

      /* Some front ends do not explicitly declare all anonymous
	 artificial variables.  We compensate here by declaring the
	 variables, though it would be better if the front ends would
	 explicitly declare them.  */
      if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
	  && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
	gimple_add_tmp_var (decl);

      if (init && init != error_mark_node)
	{
	  if (!TREE_STATIC (decl))
	    {
	      DECL_INITIAL (decl) = NULL_TREE;
	      init = build2 (INIT_EXPR, void_type_node, decl, init);
	      gimplify_and_add (init, seq_p);
	      ggc_free (init);
	    }
	  else
	    /* We must still examine initializers for static variables
	       as they may contain a label address.  */
	    walk_tree (&init, force_labels_r, NULL, NULL);
	}
    }

  return GS_ALL_DONE;
}

/* Gimplify a LOOP_EXPR.  Normally this just involves gimplifying the body
   and replacing the LOOP_EXPR with goto, but if the loop contains an
   EXIT_EXPR, we need to append a label for it to jump to.  */

static enum gimplify_status
gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree saved_label = gimplify_ctxp->exit_label;
  tree start_label = create_artificial_label (UNKNOWN_LOCATION);

  gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));

  gimplify_ctxp->exit_label = NULL_TREE;

  gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);

  gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));

  if (gimplify_ctxp->exit_label)
    gimplify_seq_add_stmt (pre_p,
			   gimple_build_label (gimplify_ctxp->exit_label));

  gimplify_ctxp->exit_label = saved_label;

  *expr_p = NULL;
  return GS_ALL_DONE;
}
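/* Lowering sketch for gimplify_loop_expr: a LOOP_EXPR becomes

     start:
       <body>
       goto start;
     exit:		(emitted only if the body contained an EXIT_EXPR)
*/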
/* Gimplify a statement list onto a sequence.  These may be created either
   by an enlightened front-end, or by shortcut_cond_expr.  */

static enum gimplify_status
gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
{
  tree temp = voidify_wrapper_expr (*expr_p, NULL);

  tree_stmt_iterator i = tsi_start (*expr_p);

  while (!tsi_end_p (i))
    {
      gimplify_stmt (tsi_stmt_ptr (i), pre_p);
      tsi_delink (&i);
    }

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  return GS_ALL_DONE;
}

/* Callback for walk_gimple_seq.  */

static tree
warn_switch_unreachable_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
			   struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    case GIMPLE_TRY:
      /* A compiler-generated cleanup or a user-written try block.
	 If it's empty, don't dive into it--that would result in
	 worse location info.  */
      if (gimple_try_eval (stmt) == NULL)
	{
	  wi->info = stmt;
	  return integer_zero_node;
	}
      /* Fall through.  */
    case GIMPLE_BIND:
    case GIMPLE_CATCH:
    case GIMPLE_EH_FILTER:
    case GIMPLE_TRANSACTION:
      /* Walk the sub-statements.  */
      *handled_ops_p = false;
      break;
    case GIMPLE_CALL:
      if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
	{
	  *handled_ops_p = false;
	  break;
	}
      /* Fall through.  */
    default:
      /* Save the first "real" statement (not a decl/lexical scope/...).  */
      wi->info = stmt;
      return integer_zero_node;
    }
  return NULL_TREE;
}

/* Possibly warn about unreachable statements between switch's controlling
   expression and the first case.  SEQ is the body of a switch expression.  */

static void
maybe_warn_switch_unreachable (gimple_seq seq)
{
  if (!warn_switch_unreachable
      /* This warning doesn't play well with Fortran when optimizations
	 are on.  */
      || lang_GNU_Fortran ()
      || seq == NULL)
    return;

  struct walk_stmt_info wi;
  memset (&wi, 0, sizeof (wi));
  walk_gimple_seq (seq, warn_switch_unreachable_r, NULL, &wi);
  gimple *stmt = (gimple *) wi.info;

  if (stmt && gimple_code (stmt) != GIMPLE_LABEL)
    {
      if (gimple_code (stmt) == GIMPLE_GOTO
	  && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
	  && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
	/* Don't warn for compiler-generated gotos.  These occur
	   in Duff's devices, for example.  */;
      else
	warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
		    "statement will never be executed");
    }
}


/* A label entry that pairs label and a location.  */
struct label_entry
{
  tree label;
  location_t loc;
};

/* Find LABEL in vector of label entries VEC.  */

static struct label_entry *
find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
{
  unsigned int i;
  struct label_entry *l;

  FOR_EACH_VEC_ELT (*vec, i, l)
    if (l->label == label)
      return l;
  return NULL;
}

/* Return true if LABEL, a LABEL_DECL, represents a case label
   in a vector of labels CASES.  */

static bool
case_label_p (const vec<tree> *cases, tree label)
{
  unsigned int i;
  tree l;

  FOR_EACH_VEC_ELT (*cases, i, l)
    if (CASE_LABEL (l) == label)
      return true;
  return false;
}
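/* Example diagnosed by maybe_warn_switch_unreachable above (illustrative):

     switch (n)
       {
	 foo ();	<-- "statement will never be executed"
       case 1:
	 break;
       }
*/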
/* Find the last statement in a scope STMT.  */

static gimple *
last_stmt_in_scope (gimple *stmt)
{
  if (!stmt)
    return NULL;

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      {
	gbind *bind = as_a <gbind *> (stmt);
	stmt = gimple_seq_last_stmt (gimple_bind_body (bind));
	return last_stmt_in_scope (stmt);
      }

    case GIMPLE_TRY:
      {
	gtry *try_stmt = as_a <gtry *> (stmt);
	stmt = gimple_seq_last_stmt (gimple_try_eval (try_stmt));
	gimple *last_eval = last_stmt_in_scope (stmt);
	if (gimple_stmt_may_fallthru (last_eval)
	    && (last_eval == NULL
		|| !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
	    && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
	  {
	    stmt = gimple_seq_last_stmt (gimple_try_cleanup (try_stmt));
	    return last_stmt_in_scope (stmt);
	  }
	else
	  return last_eval;
      }

    default:
      return stmt;
    }
}

/* Collect interesting labels in LABELS and return the statement preceding
   another case label, or a user-defined label.  */

static gimple *
collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
			    auto_vec <struct label_entry> *labels)
{
  gimple *prev = NULL;

  do
    {
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
	  || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
	{
	  /* Nested scope.  Only look at the last statement of
	     the innermost scope.  */
	  location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
	  gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
	  if (last)
	    {
	      prev = last;
	      /* It might be a label without a location.  Use the
		 location of the scope then.  */
	      if (!gimple_has_location (prev))
		gimple_set_location (prev, bind_loc);
	    }
	  gsi_next (gsi_p);
	  continue;
	}

      /* Ifs are tricky.  */
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
	{
	  gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
	  tree false_lab = gimple_cond_false_label (cond_stmt);
	  location_t if_loc = gimple_location (cond_stmt);

	  /* If we have e.g.
	       if (i > 1) goto <D.2259>; else goto D;
	     we can't do much with the else-branch.  */
	  if (!DECL_ARTIFICIAL (false_lab))
	    break;

	  /* Go on until the false label, then one step back.  */
	  for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
	    {
	      gimple *stmt = gsi_stmt (*gsi_p);
	      if (gimple_code (stmt) == GIMPLE_LABEL
		  && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
		break;
	    }

	  /* Not found?  Oops.  */
	  if (gsi_end_p (*gsi_p))
	    break;

	  struct label_entry l = { false_lab, if_loc };
	  labels->safe_push (l);

	  /* Go to the last statement of the then branch.  */
	  gsi_prev (gsi_p);

	  /* if (i != 0) goto <D.1759>; else goto <D.1760>;
	     <D.1759>:
	     <stmt>;
	     goto <D.1761>;
	     <D.1760>:
	   */
	  if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
	      && !gimple_has_location (gsi_stmt (*gsi_p)))
	    {
	      /* Look at the statement before, it might be
		 attribute fallthrough, in which case don't warn.  */
	      gsi_prev (gsi_p);
	      bool fallthru_before_dest
		= gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
	      gsi_next (gsi_p);
	      tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
	      if (!fallthru_before_dest)
		{
		  struct label_entry l = { goto_dest, if_loc };
		  labels->safe_push (l);
		}
	    }
	  /* And move back.  */
*/ 1963 gsi_next (gsi_p); 1964 } 1965 1966 /* Remember the last statement. Skip labels that are of no interest 1967 to us. */ 1968 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL) 1969 { 1970 tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p))); 1971 if (find_label_entry (labels, label)) 1972 prev = gsi_stmt (*gsi_p); 1973 } 1974 else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK)) 1975 ; 1976 else 1977 prev = gsi_stmt (*gsi_p); 1978 gsi_next (gsi_p); 1979 } 1980 while (!gsi_end_p (*gsi_p) 1981 /* Stop if we find a case or a user-defined label. */ 1982 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL 1983 || !gimple_has_location (gsi_stmt (*gsi_p)))); 1984 1985 return prev; 1986 } 1987 1988 /* Return true if the switch fallthrough warning should occur. LABEL is 1989 the label statement that we're falling through to. */ 1990 1991 static bool 1992 should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label) 1993 { 1994 gimple_stmt_iterator gsi = *gsi_p; 1995 1996 /* Don't warn if the label is marked with a "falls through" comment. */ 1997 if (FALLTHROUGH_LABEL_P (label)) 1998 return false; 1999 2000 /* Don't warn for non-case labels followed by a statement: 2001 case 0: 2002 foo (); 2003 label: 2004 bar (); 2005 as these are likely intentional. */ 2006 if (!case_label_p (&gimplify_ctxp->case_labels, label)) 2007 { 2008 tree l; 2009 while (!gsi_end_p (gsi) 2010 && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL 2011 && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi)))) 2012 && !case_label_p (&gimplify_ctxp->case_labels, l)) 2013 gsi_next (&gsi); 2014 if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL) 2015 return false; 2016 } 2017 2018 /* Don't warn for terminated branches, i.e. when the code after the 2019 subsequent case labels immediately breaks. */ 2020 gsi = *gsi_p; 2021 2022 /* Skip all immediately following labels. */ 2023 while (!gsi_end_p (gsi) && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL) 2024 gsi_next (&gsi); 2025 2026 /* { ... something; default:; } */ 2027 if (gsi_end_p (gsi) 2028 /* { ... something; default: break; } or 2029 { ... something; default: goto L; } */ 2030 || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO 2031 /* { ... something; default: return; } */ 2032 || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN) 2033 return false; 2034 2035 return true; 2036 } 2037 2038 /* Callback for walk_gimple_seq. */ 2040 static tree 2041 warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p, 2042 struct walk_stmt_info *) 2043 { 2044 gimple *stmt = gsi_stmt (*gsi_p); 2045 2046 *handled_ops_p = true; 2047 switch (gimple_code (stmt)) 2048 { 2049 case GIMPLE_TRY: 2050 case GIMPLE_BIND: 2051 case GIMPLE_CATCH: 2052 case GIMPLE_EH_FILTER: 2053 case GIMPLE_TRANSACTION: 2054 /* Walk the sub-statements. */ 2055 *handled_ops_p = false; 2056 break; 2057 2058 /* Find a sequence of the form: 2059 2060 GIMPLE_LABEL 2061 [...] 2062 <may fallthru stmt> 2063 GIMPLE_LABEL 2064 2065 and possibly warn. */ 2066 case GIMPLE_LABEL: 2067 { 2068 /* Found a label. Skip all immediately following labels. */ 2069 while (!gsi_end_p (*gsi_p) 2070 && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL) 2071 gsi_next (gsi_p); 2072 2073 /* There might be no more statements. */ 2074 if (gsi_end_p (*gsi_p)) 2075 return integer_zero_node; 2076 2077 /* Vector of labels that fall through.
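As a sketch (the label names are invented): for gimplified code of the form if (x) goto <D.1>; else goto <D.2>; <D.1>: ...; <D.2>: case 2: ..., collect_fallthrough_labels below may record <D.2> together with the location of the if, so that the fallthrough warning can point at the spot where control falls in instead of at an artificial label.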
*/ 2078 auto_vec <struct label_entry> labels; 2079 gimple *prev = collect_fallthrough_labels (gsi_p, &labels); 2080 2081 /* There might be no more statements. */ 2082 if (gsi_end_p (*gsi_p)) 2083 return integer_zero_node; 2084 2085 gimple *next = gsi_stmt (*gsi_p); 2086 tree label; 2087 /* If what follows is a label, then we may have a fallthrough. */ 2088 if (gimple_code (next) == GIMPLE_LABEL 2089 && gimple_has_location (next) 2090 && (label = gimple_label_label (as_a <glabel *> (next))) 2091 && prev != NULL) 2092 { 2093 struct label_entry *l; 2094 bool warned_p = false; 2095 if (!should_warn_for_implicit_fallthrough (gsi_p, label)) 2096 /* Quiet. */; 2097 else if (gimple_code (prev) == GIMPLE_LABEL 2098 && (label = gimple_label_label (as_a <glabel *> (prev))) 2099 && (l = find_label_entry (&labels, label))) 2100 warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_, 2101 "this statement may fall through"); 2102 else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH) 2103 /* Try to be clever and don't warn when the statement 2104 can't actually fall through. */ 2105 && gimple_stmt_may_fallthru (prev) 2106 && gimple_has_location (prev)) 2107 warned_p = warning_at (gimple_location (prev), 2108 OPT_Wimplicit_fallthrough_, 2109 "this statement may fall through"); 2110 if (warned_p) 2111 inform (gimple_location (next), "here"); 2112 2113 /* Mark this label as processed so as to prevent multiple 2114 warnings in nested switches. */ 2115 FALLTHROUGH_LABEL_P (label) = true; 2116 2117 /* So that next warn_implicit_fallthrough_r will start looking for 2118 a new sequence starting with this label. */ 2119 gsi_prev (gsi_p); 2120 } 2121 } 2122 break; 2123 default: 2124 break; 2125 } 2126 return NULL_TREE; 2127 } 2128 2129 /* Warn when a switch case falls through. */ 2130 2131 static void 2132 maybe_warn_implicit_fallthrough (gimple_seq seq) 2133 { 2134 if (!warn_implicit_fallthrough) 2135 return; 2136 2137 /* This warning is meant for C/C++/ObjC/ObjC++ only. */ 2138 if (!(lang_GNU_C () 2139 || lang_GNU_CXX () 2140 || lang_GNU_OBJC ())) 2141 return; 2142 2143 struct walk_stmt_info wi; 2144 memset (&wi, 0, sizeof (wi)); 2145 walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi); 2146 } 2147 2148 /* Callback for walk_gimple_seq. */ 2149 2150 static tree 2151 expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p, 2152 struct walk_stmt_info *) 2153 { 2154 gimple *stmt = gsi_stmt (*gsi_p); 2155 2156 *handled_ops_p = true; 2157 switch (gimple_code (stmt)) 2158 { 2159 case GIMPLE_TRY: 2160 case GIMPLE_BIND: 2161 case GIMPLE_CATCH: 2162 case GIMPLE_EH_FILTER: 2163 case GIMPLE_TRANSACTION: 2164 /* Walk the sub-statements. */ 2165 *handled_ops_p = false; 2166 break; 2167 case GIMPLE_CALL: 2168 if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH)) 2169 { 2170 gsi_remove (gsi_p, true); 2171 if (gsi_end_p (*gsi_p)) 2172 return integer_zero_node; 2173 2174 bool found = false; 2175 location_t loc = gimple_location (stmt); 2176 2177 gimple_stmt_iterator gsi2 = *gsi_p; 2178 stmt = gsi_stmt (gsi2); 2179 if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt)) 2180 { 2181 /* Go on until the artificial label. */ 2182 tree goto_dest = gimple_goto_dest (stmt); 2183 for (; !gsi_end_p (gsi2); gsi_next (&gsi2)) 2184 { 2185 if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL 2186 && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2))) 2187 == goto_dest) 2188 break; 2189 } 2190 2191 /* Not found? Stop. */ 2192 if (gsi_end_p (gsi2)) 2193 break; 2194 2195 /* Look one past it. 
*/ 2196 gsi_next (&gsi2); 2197 } 2198 2199 /* We're looking for a case label or default label here. */ 2200 while (!gsi_end_p (gsi2)) 2201 { 2202 stmt = gsi_stmt (gsi2); 2203 if (gimple_code (stmt) == GIMPLE_LABEL) 2204 { 2205 tree label = gimple_label_label (as_a <glabel *> (stmt)); 2206 if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label)) 2207 { 2208 found = true; 2209 break; 2210 } 2211 } 2212 else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK)) 2213 ; 2214 else 2215 /* Anything else is not expected. */ 2216 break; 2217 gsi_next (&gsi2); 2218 } 2219 if (!found) 2220 warning_at (loc, 0, "attribute %<fallthrough%> not preceding " 2221 "a case label or default label"); 2222 } 2223 break; 2224 default: 2225 break; 2226 } 2227 return NULL_TREE; 2228 } 2229 2230 /* Expand all FALLTHROUGH () calls in SEQ. */ 2232 static void 2233 expand_FALLTHROUGH (gimple_seq *seq_p) 2234 { 2235 struct walk_stmt_info wi; 2236 memset (&wi, 0, sizeof (wi)); 2237 walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi); 2238 } 2239 2240 2241 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can 2242 branch to. */ 2244 static enum gimplify_status 2245 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p) 2246 { 2247 tree switch_expr = *expr_p; 2248 gimple_seq switch_body_seq = NULL; 2249 enum gimplify_status ret; 2250 tree index_type = TREE_TYPE (switch_expr); 2251 if (index_type == NULL_TREE) 2252 index_type = TREE_TYPE (SWITCH_COND (switch_expr)); 2253 2254 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val, 2255 fb_rvalue); 2256 if (ret == GS_ERROR || ret == GS_UNHANDLED) 2257 return ret; 2258 2259 if (SWITCH_BODY (switch_expr)) 2260 { 2261 vec<tree> labels; 2262 vec<tree> saved_labels; 2263 hash_set<tree> *saved_live_switch_vars = NULL; 2264 tree default_case = NULL_TREE; 2265 gswitch *switch_stmt; 2266 2267 /* If someone can be bothered to fill in the labels, they can 2268 be bothered to null out the body too. */ 2269 gcc_assert (!SWITCH_LABELS (switch_expr)); 2270 2271 /* Save old labels, get new ones from body, then restore the old 2272 labels. Save all the things from the switch body to append after. */ 2273 saved_labels = gimplify_ctxp->case_labels; 2274 gimplify_ctxp->case_labels.create (8); 2275 2276 /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR. */ 2277 saved_live_switch_vars = gimplify_ctxp->live_switch_vars; 2278 tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr)); 2279 if (body_type == BIND_EXPR || body_type == STATEMENT_LIST) 2280 gimplify_ctxp->live_switch_vars = new hash_set<tree> (4); 2281 else 2282 gimplify_ctxp->live_switch_vars = NULL; 2283 2284 bool old_in_switch_expr = gimplify_ctxp->in_switch_expr; 2285 gimplify_ctxp->in_switch_expr = true; 2286 2287 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq); 2288 2289 gimplify_ctxp->in_switch_expr = old_in_switch_expr; 2290 maybe_warn_switch_unreachable (switch_body_seq); 2291 maybe_warn_implicit_fallthrough (switch_body_seq); 2292 /* Only do this for the outermost GIMPLE_SWITCH.
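For nested switches such as switch (a) { case 1: switch (b) { ... } ... } (a sketch), in_switch_expr is still set once the inner body has been gimplified, so the check below defers expand_FALLTHROUGH until the body of the outermost switch is complete and every case label has been seen.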
*/ 2293 if (!gimplify_ctxp->in_switch_expr) 2294 expand_FALLTHROUGH (&switch_body_seq); 2295 2296 labels = gimplify_ctxp->case_labels; 2297 gimplify_ctxp->case_labels = saved_labels; 2298 2299 if (gimplify_ctxp->live_switch_vars) 2300 { 2301 gcc_assert (gimplify_ctxp->live_switch_vars->elements () == 0); 2302 delete gimplify_ctxp->live_switch_vars; 2303 } 2304 gimplify_ctxp->live_switch_vars = saved_live_switch_vars; 2305 2306 preprocess_case_label_vec_for_gimple (labels, index_type, 2307 &default_case); 2308 2309 if (!default_case) 2310 { 2311 glabel *new_default; 2312 2313 default_case 2314 = build_case_label (NULL_TREE, NULL_TREE, 2315 create_artificial_label (UNKNOWN_LOCATION)); 2316 new_default = gimple_build_label (CASE_LABEL (default_case)); 2317 gimplify_seq_add_stmt (&switch_body_seq, new_default); 2318 } 2319 2320 switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr), 2321 default_case, labels); 2322 gimplify_seq_add_stmt (pre_p, switch_stmt); 2323 gimplify_seq_add_seq (pre_p, switch_body_seq); 2324 labels.release (); 2325 } 2326 else 2327 gcc_assert (SWITCH_LABELS (switch_expr)); 2328 2329 return GS_ALL_DONE; 2330 } 2331 2332 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */ 2333 2334 static enum gimplify_status 2335 gimplify_label_expr (tree *expr_p, gimple_seq *pre_p) 2336 { 2337 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p)) 2338 == current_function_decl); 2339 2340 glabel *label_stmt = gimple_build_label (LABEL_EXPR_LABEL (*expr_p)); 2341 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p)); 2342 gimplify_seq_add_stmt (pre_p, label_stmt); 2343 2344 return GS_ALL_DONE; 2345 } 2346 2347 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */ 2348 2349 static enum gimplify_status 2350 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p) 2351 { 2352 struct gimplify_ctx *ctxp; 2353 glabel *label_stmt; 2354 2355 /* Invalid programs can play Duff's Device type games with, for example, 2356 #pragma omp parallel. At least in the C front end, we don't 2357 detect such invalid branches until after gimplification, in the 2358 diagnose_omp_blocks pass. */ 2359 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context) 2360 if (ctxp->case_labels.exists ()) 2361 break; 2362 2363 label_stmt = gimple_build_label (CASE_LABEL (*expr_p)); 2364 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p)); 2365 ctxp->case_labels.safe_push (*expr_p); 2366 gimplify_seq_add_stmt (pre_p, label_stmt); 2367 2368 return GS_ALL_DONE; 2369 } 2370 2371 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first 2372 if necessary. */ 2373 2374 tree 2375 build_and_jump (tree *label_p) 2376 { 2377 if (label_p == NULL) 2378 /* If there's nowhere to jump, just fall through. */ 2379 return NULL_TREE; 2380 2381 if (*label_p == NULL_TREE) 2382 { 2383 tree label = create_artificial_label (UNKNOWN_LOCATION); 2384 *label_p = label; 2385 } 2386 2387 return build1 (GOTO_EXPR, void_type_node, *label_p); 2388 } 2389 2390 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR. 2391 This also involves building a label to jump to and communicating it to 2392 gimplify_loop_expr through gimplify_ctxp->exit_label. 
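For example (a sketch), EXIT_EXPR <i >= n> becomes the equivalent of if (i >= n) goto <D.exit>; where the label is created on demand by build_and_jump and emitted after the loop body once gimplify_loop_expr processes the enclosing LOOP_EXPR.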
*/ 2393 2394 static enum gimplify_status 2395 gimplify_exit_expr (tree *expr_p) 2396 { 2397 tree cond = TREE_OPERAND (*expr_p, 0); 2398 tree expr; 2399 2400 expr = build_and_jump (&gimplify_ctxp->exit_label); 2401 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE); 2402 *expr_p = expr; 2403 2404 return GS_OK; 2405 } 2406 2407 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is 2408 different from its canonical type, wrap the whole thing inside a 2409 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical 2410 type. 2411 2412 The canonical type of a COMPONENT_REF is the type of the field being 2413 referenced--unless the field is a bit-field which can be read directly 2414 in a smaller mode, in which case the canonical type is the 2415 sign-appropriate type corresponding to that mode. */ 2417 static void 2418 canonicalize_component_ref (tree *expr_p) 2419 { 2420 tree expr = *expr_p; 2421 tree type; 2422 2423 gcc_assert (TREE_CODE (expr) == COMPONENT_REF); 2424 2425 if (INTEGRAL_TYPE_P (TREE_TYPE (expr))) 2426 type = TREE_TYPE (get_unwidened (expr, NULL_TREE)); 2427 else 2428 type = TREE_TYPE (TREE_OPERAND (expr, 1)); 2429 2430 /* One could argue that all the stuff below is not necessary for 2431 the non-bitfield case and declare it a FE error if type 2432 adjustment would be needed. */ 2433 if (TREE_TYPE (expr) != type) 2434 { 2435 #ifdef ENABLE_TYPES_CHECKING 2436 tree old_type = TREE_TYPE (expr); 2437 #endif 2438 int type_quals; 2439 2440 /* We need to preserve qualifiers and propagate them from 2441 operand 0. */ 2442 type_quals = TYPE_QUALS (type) 2443 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0))); 2444 if (TYPE_QUALS (type) != type_quals) 2445 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals); 2446 2447 /* Set the type of the COMPONENT_REF to the underlying type. */ 2448 TREE_TYPE (expr) = type; 2449 2450 #ifdef ENABLE_TYPES_CHECKING 2451 /* It is now a FE error if the conversion from the canonical 2452 type to the original expression type is not useless. */ 2453 gcc_assert (useless_type_conversion_p (old_type, type)); 2454 #endif 2455 } 2456 } 2457 2458 /* If a NOP conversion is changing a pointer to array of foo to a pointer 2459 to foo, embed that change in the ADDR_EXPR by converting 2460 T array[U]; 2461 (T *)&array 2462 ==> 2463 &array[L] 2464 where L is the lower bound. For simplicity, only do this for constant 2465 lower bound. 2466 The constraint is that the type of &array[L] is trivially convertible 2467 to T *. */ 2469 static void 2470 canonicalize_addr_expr (tree *expr_p) 2471 { 2472 tree expr = *expr_p; 2473 tree addr_expr = TREE_OPERAND (expr, 0); 2474 tree datype, ddatype, pddatype; 2475 2476 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */ 2477 if (!POINTER_TYPE_P (TREE_TYPE (expr)) 2478 || TREE_CODE (addr_expr) != ADDR_EXPR) 2479 return; 2480 2481 /* The addr_expr type should be a pointer to an array. */ 2482 datype = TREE_TYPE (TREE_TYPE (addr_expr)); 2483 if (TREE_CODE (datype) != ARRAY_TYPE) 2484 return; 2485 2486 /* The pointer to element type must be trivially convertible to 2487 the expression pointer type. */ 2488 ddatype = TREE_TYPE (datype); 2489 pddatype = build_pointer_type (ddatype); 2490 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)), 2491 pddatype)) 2492 return; 2493 2494 /* The lower bound and element sizes must be constant.
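For instance, given int a[5]; the conversion (int *) &a is rewritten to &a[0]. We give up whenever the bound or the size is not an INTEGER_CST, e.g. for languages with non-constant lower bounds or for arrays of variable-sized elements.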
*/ 2495 if (!TYPE_SIZE_UNIT (ddatype) 2496 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST 2497 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype)) 2498 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST) 2499 return; 2500 2501 /* All checks succeeded. Build a new node to merge the cast. */ 2502 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0), 2503 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)), 2504 NULL_TREE, NULL_TREE); 2505 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p); 2506 2507 /* We can have stripped a required restrict qualifier above. */ 2508 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p))) 2509 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p); 2510 } 2511 2512 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions 2513 underneath as appropriate. */ 2514 2515 static enum gimplify_status 2516 gimplify_conversion (tree *expr_p) 2517 { 2518 location_t loc = EXPR_LOCATION (*expr_p); 2519 gcc_assert (CONVERT_EXPR_P (*expr_p)); 2520 2521 /* Then strip away all but the outermost conversion. */ 2522 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0)); 2523 2524 /* And remove the outermost conversion if it's useless. */ 2525 if (tree_ssa_useless_type_conversion (*expr_p)) 2526 *expr_p = TREE_OPERAND (*expr_p, 0); 2527 2528 /* If we still have a conversion at the toplevel, 2529 then canonicalize some constructs. */ 2530 if (CONVERT_EXPR_P (*expr_p)) 2531 { 2532 tree sub = TREE_OPERAND (*expr_p, 0); 2533 2534 /* If a NOP conversion is changing the type of a COMPONENT_REF 2535 expression, then canonicalize its type now in order to expose more 2536 redundant conversions. */ 2537 if (TREE_CODE (sub) == COMPONENT_REF) 2538 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0)); 2539 2540 /* If a NOP conversion is changing a pointer to array of foo 2541 to a pointer to foo, embed that change in the ADDR_EXPR. */ 2542 else if (TREE_CODE (sub) == ADDR_EXPR) 2543 canonicalize_addr_expr (expr_p); 2544 } 2545 2546 /* If we have a conversion to a non-register type force the 2547 use of a VIEW_CONVERT_EXPR instead. */ 2548 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p))) 2549 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p), 2550 TREE_OPERAND (*expr_p, 0)); 2551 2552 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */ 2553 if (TREE_CODE (*expr_p) == CONVERT_EXPR) 2554 TREE_SET_CODE (*expr_p, NOP_EXPR); 2555 2556 return GS_OK; 2557 } 2558 2559 /* Nonlocal VLAs seen in the current function. */ 2560 static hash_set<tree> *nonlocal_vlas; 2561 2562 /* The VAR_DECLs created for nonlocal VLAs for debug info purposes. */ 2563 static tree nonlocal_vla_vars; 2564 2565 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a 2566 DECL_VALUE_EXPR, and it's worth re-examining things. */ 2567 2568 static enum gimplify_status 2569 gimplify_var_or_parm_decl (tree *expr_p) 2570 { 2571 tree decl = *expr_p; 2572 2573 /* ??? If this is a local variable, and it has not been seen in any 2574 outer BIND_EXPR, then it's probably the result of a duplicate 2575 declaration, for which we've already issued an error. It would 2576 be really nice if the front end wouldn't leak these at all. 2577 Currently the only known culprit is C++ destructors, as seen 2578 in g++.old-deja/g++.jason/binding.C. 
*/ 2579 if (VAR_P (decl) 2580 && !DECL_SEEN_IN_BIND_EXPR_P (decl) 2581 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl) 2582 && decl_function_context (decl) == current_function_decl) 2583 { 2584 gcc_assert (seen_error ()); 2585 return GS_ERROR; 2586 } 2587 2588 /* When within an OMP context, notice uses of variables. */ 2589 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true)) 2590 return GS_ALL_DONE; 2591 2592 /* If the decl is an alias for another expression, substitute it now. */ 2593 if (DECL_HAS_VALUE_EXPR_P (decl)) 2594 { 2595 tree value_expr = DECL_VALUE_EXPR (decl); 2596 2597 /* For referenced nonlocal VLAs add a decl for debugging purposes 2598 to the current function. */ 2599 if (VAR_P (decl) 2600 && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST 2601 && nonlocal_vlas != NULL 2602 && TREE_CODE (value_expr) == INDIRECT_REF 2603 && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL 2604 && decl_function_context (decl) != current_function_decl) 2605 { 2606 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; 2607 while (ctx 2608 && (ctx->region_type == ORT_WORKSHARE 2609 || ctx->region_type == ORT_SIMD 2610 || ctx->region_type == ORT_ACC)) 2611 ctx = ctx->outer_context; 2612 if (!ctx && !nonlocal_vlas->add (decl)) 2613 { 2614 tree copy = copy_node (decl); 2615 2616 lang_hooks.dup_lang_specific_decl (copy); 2617 SET_DECL_RTL (copy, 0); 2618 TREE_USED (copy) = 1; 2619 DECL_CHAIN (copy) = nonlocal_vla_vars; 2620 nonlocal_vla_vars = copy; 2621 SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr)); 2622 DECL_HAS_VALUE_EXPR_P (copy) = 1; 2623 } 2624 } 2625 2626 *expr_p = unshare_expr (value_expr); 2627 return GS_OK; 2628 } 2629 2630 return GS_ALL_DONE; 2631 } 2632 2633 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */ 2634 2635 static void 2636 recalculate_side_effects (tree t) 2637 { 2638 enum tree_code code = TREE_CODE (t); 2639 int len = TREE_OPERAND_LENGTH (t); 2640 int i; 2641 2642 switch (TREE_CODE_CLASS (code)) 2643 { 2644 case tcc_expression: 2645 switch (code) 2646 { 2647 case INIT_EXPR: 2648 case MODIFY_EXPR: 2649 case VA_ARG_EXPR: 2650 case PREDECREMENT_EXPR: 2651 case PREINCREMENT_EXPR: 2652 case POSTDECREMENT_EXPR: 2653 case POSTINCREMENT_EXPR: 2654 /* All of these have side-effects, no matter what their 2655 operands are. */ 2656 return; 2657 2658 default: 2659 break; 2660 } 2661 /* Fall through. */ 2662 2663 case tcc_comparison: /* a comparison expression */ 2664 case tcc_unary: /* a unary arithmetic expression */ 2665 case tcc_binary: /* a binary arithmetic expression */ 2666 case tcc_reference: /* a reference */ 2667 case tcc_vl_exp: /* a function call */ 2668 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t); 2669 for (i = 0; i < len; ++i) 2670 { 2671 tree op = TREE_OPERAND (t, i); 2672 if (op && TREE_SIDE_EFFECTS (op)) 2673 TREE_SIDE_EFFECTS (t) = 1; 2674 } 2675 break; 2676 2677 case tcc_constant: 2678 /* No side-effects. */ 2679 return; 2680 2681 default: 2682 gcc_unreachable (); 2683 } 2684 } 2685 2686 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR 2687 node *EXPR_P. 2688 2689 compound_lval 2690 : min_lval '[' val ']' 2691 | min_lval '.' ID 2692 | compound_lval '[' val ']' 2693 | compound_lval '.' ID 2694 2695 This is not part of the original SIMPLE definition, which separates 2696 array and member references, but it seems reasonable to handle them 2697 together. 
Also, this way we don't run into problems with union 2698 aliasing; gcc requires that for accesses through a union to alias, the 2699 union reference must be explicit, which was not always the case when we 2700 were splitting up array and member refs. 2701 2702 PRE_P points to the sequence where side effects that must happen before 2703 *EXPR_P should be stored. 2704 2705 POST_P points to the sequence where side effects that must happen after 2706 *EXPR_P should be stored. */ 2708 static enum gimplify_status 2709 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p, 2710 fallback_t fallback) 2711 { 2712 tree *p; 2713 enum gimplify_status ret = GS_ALL_DONE, tret; 2714 int i; 2715 location_t loc = EXPR_LOCATION (*expr_p); 2716 tree expr = *expr_p; 2717 2718 /* Create a stack of the subexpressions so later we can walk them in 2719 order from inner to outer. */ 2720 auto_vec<tree, 10> expr_stack; 2721 2722 /* We can handle anything that get_inner_reference can deal with. */ 2723 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0)) 2724 { 2725 restart: 2726 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */ 2727 if (TREE_CODE (*p) == INDIRECT_REF) 2728 *p = fold_indirect_ref_loc (loc, *p); 2729 2730 if (handled_component_p (*p)) 2731 ; 2732 /* Expand DECL_VALUE_EXPR now. In some cases that may expose 2733 additional COMPONENT_REFs. */ 2734 else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL) 2735 && gimplify_var_or_parm_decl (p) == GS_OK) 2736 goto restart; 2737 else 2738 break; 2739 2740 expr_stack.safe_push (*p); 2741 } 2742 2743 gcc_assert (expr_stack.length ()); 2744 2745 /* Now EXPR_STACK is a stack of pointers to all the refs we've 2746 walked through and P points to the innermost expression. 2747 2748 Java requires that we elaborate nodes in source order. That 2749 means we must gimplify the inner expression followed by each of 2750 the indices, in order. But we can't gimplify the inner 2751 expression until we deal with any variable bounds, sizes, or 2752 positions in order to deal with PLACEHOLDER_EXPRs. 2753 2754 So we do this in three steps. First we deal with the annotations 2755 for any variables in the components, then we gimplify the base, 2756 then we gimplify any indices, from left to right. */ 2757 for (i = expr_stack.length () - 1; i >= 0; i--) 2758 { 2759 tree t = expr_stack[i]; 2760 2761 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF) 2762 { 2763 /* Gimplify the low bound and element type size and put them into 2764 the ARRAY_REF. If these values are set, they have already been 2765 gimplified. */ 2766 if (TREE_OPERAND (t, 2) == NULL_TREE) 2767 { 2768 tree low = unshare_expr (array_ref_low_bound (t)); 2769 if (!is_gimple_min_invariant (low)) 2770 { 2771 TREE_OPERAND (t, 2) = low; 2772 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, 2773 post_p, is_gimple_reg, 2774 fb_rvalue); 2775 ret = MIN (ret, tret); 2776 } 2777 } 2778 else 2779 { 2780 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p, 2781 is_gimple_reg, fb_rvalue); 2782 ret = MIN (ret, tret); 2783 } 2784 2785 if (TREE_OPERAND (t, 3) == NULL_TREE) 2786 { 2787 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0))); 2788 tree elmt_size = unshare_expr (array_ref_element_size (t)); 2789 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type)); 2790 2791 /* Divide the element size by the alignment of the element 2792 type (above).
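Operand 3 of an ARRAY_REF holds the element size in units of that alignment, so e.g. for an array of doubles the byte size 8 would be recorded as 8 / 8 == 1, and array_ref_element_size multiplies the alignment back in; only the quotient has to be gimplified here.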
*/ 2793 elmt_size 2794 = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor); 2795 2796 if (!is_gimple_min_invariant (elmt_size)) 2797 { 2798 TREE_OPERAND (t, 3) = elmt_size; 2799 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, 2800 post_p, is_gimple_reg, 2801 fb_rvalue); 2802 ret = MIN (ret, tret); 2803 } 2804 } 2805 else 2806 { 2807 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p, 2808 is_gimple_reg, fb_rvalue); 2809 ret = MIN (ret, tret); 2810 } 2811 } 2812 else if (TREE_CODE (t) == COMPONENT_REF) 2813 { 2814 /* Set the field offset into T and gimplify it. */ 2815 if (TREE_OPERAND (t, 2) == NULL_TREE) 2816 { 2817 tree offset = unshare_expr (component_ref_field_offset (t)); 2818 tree field = TREE_OPERAND (t, 1); 2819 tree factor 2820 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT); 2821 2822 /* Divide the offset by its alignment. */ 2823 offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor); 2824 2825 if (!is_gimple_min_invariant (offset)) 2826 { 2827 TREE_OPERAND (t, 2) = offset; 2828 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, 2829 post_p, is_gimple_reg, 2830 fb_rvalue); 2831 ret = MIN (ret, tret); 2832 } 2833 } 2834 else 2835 { 2836 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p, 2837 is_gimple_reg, fb_rvalue); 2838 ret = MIN (ret, tret); 2839 } 2840 } 2841 } 2842 2843 /* Step 2 is to gimplify the base expression. Make sure lvalue is set 2844 so as to match the min_lval predicate. Failure to do so may result 2845 in the creation of large aggregate temporaries. */ 2846 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval, 2847 fallback | fb_lvalue); 2848 ret = MIN (ret, tret); 2849 2850 /* And finally, the indices and operands of ARRAY_REF. During this 2851 loop we also remove any useless conversions. */ 2852 for (; expr_stack.length () > 0; ) 2853 { 2854 tree t = expr_stack.pop (); 2855 2856 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF) 2857 { 2858 /* Gimplify the dimension. */ 2859 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1))) 2860 { 2861 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p, 2862 is_gimple_val, fb_rvalue); 2863 ret = MIN (ret, tret); 2864 } 2865 } 2866 2867 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0)); 2868 2869 /* The innermost expression P may have originally had 2870 TREE_SIDE_EFFECTS set which would have caused all the outer 2871 expressions in *EXPR_P leading to P to also have had 2872 TREE_SIDE_EFFECTS set. */ 2873 recalculate_side_effects (t); 2874 } 2875 2876 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */ 2877 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF) 2878 { 2879 canonicalize_component_ref (expr_p); 2880 } 2881 2882 expr_stack.release (); 2883 2884 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE); 2885 2886 return ret; 2887 } 2888 2889 /* Gimplify the self modifying expression pointed to by EXPR_P 2890 (++, --, +=, -=). 2891 2892 PRE_P points to the list where side effects that must happen before 2893 *EXPR_P should be stored. 2894 2895 POST_P points to the list where side effects that must happen after 2896 *EXPR_P should be stored. 2897 2898 WANT_VALUE is nonzero iff we want to use the value of this expression 2899 in another expression. 2900 2901 ARITH_TYPE is the type the computation should be performed in. 
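As an illustrative sketch (the names are invented): with WANT_VALUE set, the postfix p = a[i]++; gimplifies roughly to t1 = a[i]; a[i] = t1 + 1; p = t1; so the saved temporary becomes the result, whereas the prefix ++a[i] needs no temporary and is simply rewritten as a[i] = a[i] + 1. For pointer operands the update is built with POINTER_PLUS_EXPR instead of PLUS_EXPR.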
*/ 2902 2903 enum gimplify_status 2904 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p, 2905 bool want_value, tree arith_type) 2906 { 2907 enum tree_code code; 2908 tree lhs, lvalue, rhs, t1; 2909 gimple_seq post = NULL, *orig_post_p = post_p; 2910 bool postfix; 2911 enum tree_code arith_code; 2912 enum gimplify_status ret; 2913 location_t loc = EXPR_LOCATION (*expr_p); 2914 2915 code = TREE_CODE (*expr_p); 2916 2917 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR 2918 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR); 2919 2920 /* Prefix or postfix? */ 2921 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR) 2922 /* Faster to treat as prefix if result is not used. */ 2923 postfix = want_value; 2924 else 2925 postfix = false; 2926 2927 /* For postfix, make sure the inner expression's post side effects 2928 are executed after side effects from this expression. */ 2929 if (postfix) 2930 post_p = &post; 2931 2932 /* Add or subtract? */ 2933 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR) 2934 arith_code = PLUS_EXPR; 2935 else 2936 arith_code = MINUS_EXPR; 2937 2938 /* Gimplify the LHS into a GIMPLE lvalue. */ 2939 lvalue = TREE_OPERAND (*expr_p, 0); 2940 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue); 2941 if (ret == GS_ERROR) 2942 return ret; 2943 2944 /* Extract the operands to the arithmetic operation. */ 2945 lhs = lvalue; 2946 rhs = TREE_OPERAND (*expr_p, 1); 2947 2948 /* For a postfix operator, we evaluate the LHS to an rvalue and then use 2949 that as the result value and in the postqueue operation. */ 2950 if (postfix) 2951 { 2952 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue); 2953 if (ret == GS_ERROR) 2954 return ret; 2955 2956 lhs = get_initialized_tmp_var (lhs, pre_p, NULL); 2957 } 2958 2959 /* For pointer increment and decrement, use POINTER_PLUS_EXPR. */ 2960 if (POINTER_TYPE_P (TREE_TYPE (lhs))) 2961 { 2962 rhs = convert_to_ptrofftype_loc (loc, rhs); 2963 if (arith_code == MINUS_EXPR) 2964 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs); 2965 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs); 2966 } 2967 else 2968 t1 = fold_convert (TREE_TYPE (*expr_p), 2969 fold_build2 (arith_code, arith_type, 2970 fold_convert (arith_type, lhs), 2971 fold_convert (arith_type, rhs))); 2972 2973 if (postfix) 2974 { 2975 gimplify_assign (lvalue, t1, pre_p); 2976 gimplify_seq_add_seq (orig_post_p, post); 2977 *expr_p = lhs; 2978 return GS_ALL_DONE; 2979 } 2980 else 2981 { 2982 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1); 2983 return GS_OK; 2984 } 2985 } 2986 2987 /* If *EXPR_P has a variable-sized type, wrap it in a WITH_SIZE_EXPR. */ 2989 static void 2990 maybe_with_size_expr (tree *expr_p) 2991 { 2992 tree expr = *expr_p; 2993 tree type = TREE_TYPE (expr); 2994 tree size; 2995 2996 /* If we've already wrapped this or the type is error_mark_node, we can't do 2997 anything. */ 2998 if (TREE_CODE (expr) == WITH_SIZE_EXPR 2999 || type == error_mark_node) 3000 return; 3001 3002 /* If the size isn't known or is a constant, we have nothing to do. */ 3003 size = TYPE_SIZE_UNIT (type); 3004 if (!size || TREE_CODE (size) == INTEGER_CST) 3005 return; 3006 3007 /* Otherwise, make a WITH_SIZE_EXPR. */ 3008 size = unshare_expr (size); 3009 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr); 3010 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size); 3011 } 3012 3013 /* Helper for gimplify_call_expr.
Gimplify a single argument *ARG_P. 3014 Store any side-effects in PRE_P. CALL_LOCATION is the location of 3015 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be 3016 gimplified to an SSA name. */ 3018 enum gimplify_status 3019 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location, 3020 bool allow_ssa) 3021 { 3022 bool (*test) (tree); 3023 fallback_t fb; 3024 3025 /* In general, we allow lvalues for function arguments to avoid 3026 extra overhead of copying large aggregates out of even larger 3027 aggregates into temporaries only to copy the temporaries to 3028 the argument list. Make optimizers happy by pulling out to 3029 temporaries those types that fit in registers. */ 3030 if (is_gimple_reg_type (TREE_TYPE (*arg_p))) 3031 test = is_gimple_val, fb = fb_rvalue; 3032 else 3033 { 3034 test = is_gimple_lvalue, fb = fb_either; 3035 /* Also strip a TARGET_EXPR that would force an extra copy. */ 3036 if (TREE_CODE (*arg_p) == TARGET_EXPR) 3037 { 3038 tree init = TARGET_EXPR_INITIAL (*arg_p); 3039 if (init 3040 && !VOID_TYPE_P (TREE_TYPE (init))) 3041 *arg_p = init; 3042 } 3043 } 3044 3045 /* If this is a variable sized type, we must remember the size. */ 3046 maybe_with_size_expr (arg_p); 3047 3048 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */ 3049 /* Make sure arguments have the same location as the function call 3050 itself. */ 3051 protected_set_expr_location (*arg_p, call_location); 3052 3053 /* There is a sequence point before a function call. Side effects in 3054 the argument list must occur before the actual call. So, when 3055 gimplifying arguments, force gimplify_expr to use an internal 3056 post queue which is then appended to the end of PRE_P. */ 3057 return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa); 3058 } 3059 3060 /* Don't fold inside offloading or taskreg regions: it can break code by 3061 adding decl references that weren't in the source. We'll do it during 3062 the omplower pass instead. */ 3064 static bool 3065 maybe_fold_stmt (gimple_stmt_iterator *gsi) 3066 { 3067 struct gimplify_omp_ctx *ctx; 3068 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context) 3069 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0) 3070 return false; 3071 return fold_stmt (gsi); 3072 } 3073 3074 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P. 3075 WANT_VALUE is true if the result of the call is desired. */ 3077 static enum gimplify_status 3078 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value) 3079 { 3080 tree fndecl, parms, p, fnptrtype; 3081 enum gimplify_status ret; 3082 int i, nargs; 3083 gcall *call; 3084 bool builtin_va_start_p = false; 3085 location_t loc = EXPR_LOCATION (*expr_p); 3086 3087 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR); 3088 3089 /* For reliable diagnostics during inlining, it is necessary that 3090 every call_expr be annotated with file and line. */ 3091 if (! EXPR_HAS_LOCATION (*expr_p)) 3092 SET_EXPR_LOCATION (*expr_p, input_location); 3093 3094 /* Gimplify internal functions created in the FEs.
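Such calls have a NULL CALL_EXPR_FN. For example (illustration only), sanitizer instrumentation emits calls like .UBSAN_CHECK_ADD (a, b); they are rebuilt below as internal-function GIMPLE calls and bypass the builtin handling that follows.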
*/ 3095 if (CALL_EXPR_FN (*expr_p) == NULL_TREE) 3096 { 3097 if (want_value) 3098 return GS_ALL_DONE; 3099 3100 nargs = call_expr_nargs (*expr_p); 3101 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p); 3102 auto_vec<tree> vargs (nargs); 3103 3104 for (i = 0; i < nargs; i++) 3105 { 3106 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, 3107 EXPR_LOCATION (*expr_p)); 3108 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i)); 3109 } 3110 gimple *call = gimple_build_call_internal_vec (ifn, vargs); 3111 gimplify_seq_add_stmt (pre_p, call); 3112 return GS_ALL_DONE; 3113 } 3114 3115 /* This may be a call to a builtin function. 3116 3117 Builtin function calls may be transformed into different 3118 (and more efficient) builtin function calls under certain 3119 circumstances. Unfortunately, gimplification can muck things 3120 up enough that the builtin expanders are not aware that certain 3121 transformations are still valid. 3122 3123 So we attempt transformation/gimplification of the call before 3124 we gimplify the CALL_EXPR. At this time we do not manage to 3125 transform all calls in the same manner as the expanders do, but 3126 we do transform most of them. */ 3127 fndecl = get_callee_fndecl (*expr_p); 3128 if (fndecl 3129 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL) 3130 switch (DECL_FUNCTION_CODE (fndecl)) 3131 { 3132 case BUILT_IN_ALLOCA: 3133 case BUILT_IN_ALLOCA_WITH_ALIGN: 3134 /* If the call has been built for a variable-sized object, then we 3135 want to restore the stack level when the enclosing BIND_EXPR is 3136 exited to reclaim the allocated space; otherwise, we precisely 3137 need to do the opposite and preserve the latest stack level. */ 3138 if (CALL_ALLOCA_FOR_VAR_P (*expr_p)) 3139 gimplify_ctxp->save_stack = true; 3140 else 3141 gimplify_ctxp->keep_stack = true; 3142 break; 3143 3144 case BUILT_IN_VA_START: 3145 { 3146 builtin_va_start_p = TRUE; 3147 if (call_expr_nargs (*expr_p) < 2) 3148 { 3149 error ("too few arguments to function %<va_start%>"); 3150 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p)); 3151 return GS_OK; 3152 } 3153 3154 if (fold_builtin_next_arg (*expr_p, true)) 3155 { 3156 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p)); 3157 return GS_OK; 3158 } 3159 break; 3160 } 3161 3162 default: 3163 ; 3164 } 3165 if (fndecl && DECL_BUILT_IN (fndecl)) 3166 { 3167 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value); 3168 if (new_tree && new_tree != *expr_p) 3169 { 3170 /* There was a transformation of this call which computes the 3171 same value, but in a more efficient way. Return and try 3172 again. */ 3173 *expr_p = new_tree; 3174 return GS_OK; 3175 } 3176 } 3177 3178 /* Remember the original function pointer type. */ 3179 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p)); 3180 3181 /* There is a sequence point before the call, so any side effects in 3182 the calling expression must occur before the actual call. Force 3183 gimplify_expr to use an internal post queue. */ 3184 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL, 3185 is_gimple_call_addr, fb_rvalue); 3186 3187 nargs = call_expr_nargs (*expr_p); 3188 3189 /* Get argument types for verification. 
*/ 3190 fndecl = get_callee_fndecl (*expr_p); 3191 parms = NULL_TREE; 3192 if (fndecl) 3193 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl)); 3194 else 3195 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype)); 3196 3197 if (fndecl && DECL_ARGUMENTS (fndecl)) 3198 p = DECL_ARGUMENTS (fndecl); 3199 else if (parms) 3200 p = parms; 3201 else 3202 p = NULL_TREE; 3203 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p)) 3204 ; 3205 3206 /* If the last argument is __builtin_va_arg_pack () and it is not 3207 passed as a named argument, decrease the number of CALL_EXPR 3208 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */ 3209 if (!p 3210 && i < nargs 3211 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR) 3212 { 3213 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1); 3214 tree last_arg_fndecl = get_callee_fndecl (last_arg); 3215 3216 if (last_arg_fndecl 3217 && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL 3218 && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL 3219 && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK) 3220 { 3221 tree call = *expr_p; 3222 3223 --nargs; 3224 *expr_p = build_call_array_loc (loc, TREE_TYPE (call), 3225 CALL_EXPR_FN (call), 3226 nargs, CALL_EXPR_ARGP (call)); 3227 3228 /* Copy all CALL_EXPR flags, location and block, except 3229 CALL_EXPR_VA_ARG_PACK flag. */ 3230 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call); 3231 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call); 3232 CALL_EXPR_RETURN_SLOT_OPT (*expr_p) 3233 = CALL_EXPR_RETURN_SLOT_OPT (call); 3234 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call); 3235 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call)); 3236 3237 /* Set CALL_EXPR_VA_ARG_PACK. */ 3238 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1; 3239 } 3240 } 3241 3242 /* If the call returns twice then after building the CFG the call 3243 argument computations will no longer dominate the call because 3244 we add an abnormal incoming edge to the call. So do not use SSA 3245 vars there. */ 3246 bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE; 3247 3248 /* Gimplify the function arguments. */ 3249 if (nargs > 0) 3250 { 3251 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0); 3252 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs; 3253 PUSH_ARGS_REVERSED ? i-- : i++) 3254 { 3255 enum gimplify_status t; 3256 3257 /* Avoid gimplifying the second argument to va_start, which needs to 3258 be the plain PARM_DECL. */ 3259 if ((i != 1) || !builtin_va_start_p) 3260 { 3261 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, 3262 EXPR_LOCATION (*expr_p), ! returns_twice); 3263 3264 if (t == GS_ERROR) 3265 ret = GS_ERROR; 3266 } 3267 } 3268 } 3269 3270 /* Gimplify the static chain. */ 3271 if (CALL_EXPR_STATIC_CHAIN (*expr_p)) 3272 { 3273 if (fndecl && !DECL_STATIC_CHAIN (fndecl)) 3274 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL; 3275 else 3276 { 3277 enum gimplify_status t; 3278 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p, 3279 EXPR_LOCATION (*expr_p), ! returns_twice); 3280 if (t == GS_ERROR) 3281 ret = GS_ERROR; 3282 } 3283 } 3284 3285 /* Verify the function result. */ 3286 if (want_value && fndecl 3287 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype)))) 3288 { 3289 error_at (loc, "using result of function returning %<void%>"); 3290 ret = GS_ERROR; 3291 } 3292 3293 /* Try this again in case gimplification exposed something. 
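For example, after the arguments have been gimplified a call such as strlen (s) may now have an argument that is provably the address of a string literal, in which case fold_call_expr can reduce the call to a constant.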
*/ 3294 if (ret != GS_ERROR) 3295 { 3296 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value); 3297 3298 if (new_tree && new_tree != *expr_p) 3299 { 3300 /* There was a transformation of this call which computes the 3301 same value, but in a more efficient way. Return and try 3302 again. */ 3303 *expr_p = new_tree; 3304 return GS_OK; 3305 } 3306 } 3307 else 3308 { 3309 *expr_p = error_mark_node; 3310 return GS_ERROR; 3311 } 3312 3313 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its 3314 decl. This allows us to eliminate redundant or useless 3315 calls to "const" functions. */ 3316 if (TREE_CODE (*expr_p) == CALL_EXPR) 3317 { 3318 int flags = call_expr_flags (*expr_p); 3319 if (flags & (ECF_CONST | ECF_PURE) 3320 /* An infinite loop is considered a side effect. */ 3321 && !(flags & (ECF_LOOPING_CONST_OR_PURE))) 3322 TREE_SIDE_EFFECTS (*expr_p) = 0; 3323 } 3324 3325 /* If the value is not needed by the caller, emit a new GIMPLE_CALL 3326 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified 3327 form and delegate the creation of a GIMPLE_CALL to 3328 gimplify_modify_expr. This is always possible because when 3329 WANT_VALUE is true, the caller wants the result of this call into 3330 a temporary, which means that we will emit an INIT_EXPR in 3331 internal_get_tmp_var which will then be handled by 3332 gimplify_modify_expr. */ 3333 if (!want_value) 3334 { 3335 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we 3336 have to do is replicate it as a GIMPLE_CALL tuple. */ 3337 gimple_stmt_iterator gsi; 3338 call = gimple_build_call_from_tree (*expr_p); 3339 gimple_call_set_fntype (call, TREE_TYPE (fnptrtype)); 3340 notice_special_calls (call); 3341 gimplify_seq_add_stmt (pre_p, call); 3342 gsi = gsi_last (*pre_p); 3343 maybe_fold_stmt (&gsi); 3344 *expr_p = NULL_TREE; 3345 } 3346 else 3347 /* Remember the original function type. */ 3348 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype, 3349 CALL_EXPR_FN (*expr_p)); 3350 3351 return ret; 3352 } 3353 3354 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by 3355 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs. 3356 3357 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the 3358 condition is true or false, respectively. If null, we should generate 3359 our own to skip over the evaluation of this specific expression. 3360 3361 LOCUS is the source location of the COND_EXPR. 3362 3363 This function is the tree equivalent of do_jump. 3364 3365 shortcut_cond_r should only be called by shortcut_cond_expr. */ 3366 3367 static tree 3368 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p, 3369 location_t locus) 3370 { 3371 tree local_label = NULL_TREE; 3372 tree t, expr = NULL; 3373 3374 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to 3375 retain the shortcut semantics. Just insert the gotos here; 3376 shortcut_cond_expr will append the real blocks later. */ 3377 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR) 3378 { 3379 location_t new_locus; 3380 3381 /* Turn if (a && b) into 3382 3383 if (a); else goto no; 3384 if (b) goto yes; else goto no; 3385 (no:) */ 3386 3387 if (false_label_p == NULL) 3388 false_label_p = &local_label; 3389 3390 /* Keep the original source location on the first 'if'. */ 3391 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus); 3392 append_to_statement_list (t, &expr); 3393 3394 /* Set the source location of the && on the second 'if'. 
*/ 3395 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus; 3396 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p, 3397 new_locus); 3398 append_to_statement_list (t, &expr); 3399 } 3400 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR) 3401 { 3402 location_t new_locus; 3403 3404 /* Turn if (a || b) into 3405 3406 if (a) goto yes; 3407 if (b) goto yes; else goto no; 3408 (yes:) */ 3409 3410 if (true_label_p == NULL) 3411 true_label_p = &local_label; 3412 3413 /* Keep the original source location on the first 'if'. */ 3414 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus); 3415 append_to_statement_list (t, &expr); 3416 3417 /* Set the source location of the || on the second 'if'. */ 3418 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus; 3419 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p, 3420 new_locus); 3421 append_to_statement_list (t, &expr); 3422 } 3423 else if (TREE_CODE (pred) == COND_EXPR 3424 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1))) 3425 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2)))) 3426 { 3427 location_t new_locus; 3428 3429 /* As long as we're messing with gotos, turn if (a ? b : c) into 3430 if (a) 3431 if (b) goto yes; else goto no; 3432 else 3433 if (c) goto yes; else goto no; 3434 3435 Don't do this if one of the arms has void type, which can happen 3436 in C++ when the arm is throw. */ 3437 3438 /* Keep the original source location on the first 'if'. Set the source 3439 location of the ? on the second 'if'. */ 3440 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus; 3441 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0), 3442 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, 3443 false_label_p, locus), 3444 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p, 3445 false_label_p, new_locus)); 3446 } 3447 else 3448 { 3449 expr = build3 (COND_EXPR, void_type_node, pred, 3450 build_and_jump (true_label_p), 3451 build_and_jump (false_label_p)); 3452 SET_EXPR_LOCATION (expr, locus); 3453 } 3454 3455 if (local_label) 3456 { 3457 t = build1 (LABEL_EXPR, void_type_node, local_label); 3458 append_to_statement_list (t, &expr); 3459 } 3460 3461 return expr; 3462 } 3463 3464 /* Given a conditional expression EXPR with short-circuit boolean 3465 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the 3466 predicate apart into the equivalent sequence of conditionals. */ 3467 3468 static tree 3469 shortcut_cond_expr (tree expr) 3470 { 3471 tree pred = TREE_OPERAND (expr, 0); 3472 tree then_ = TREE_OPERAND (expr, 1); 3473 tree else_ = TREE_OPERAND (expr, 2); 3474 tree true_label, false_label, end_label, t; 3475 tree *true_label_p; 3476 tree *false_label_p; 3477 bool emit_end, emit_false, jump_over_else; 3478 bool then_se = then_ && TREE_SIDE_EFFECTS (then_); 3479 bool else_se = else_ && TREE_SIDE_EFFECTS (else_); 3480 3481 /* First do simple transformations. */ 3482 if (!else_se) 3483 { 3484 /* If there is no 'else', turn 3485 if (a && b) then c 3486 into 3487 if (a) if (b) then c. */ 3488 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR) 3489 { 3490 /* Keep the original source location on the first 'if'. */ 3491 location_t locus = EXPR_LOC_OR_LOC (expr, input_location); 3492 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1); 3493 /* Set the source location of the && on the second 'if'. 
*/ 3494 if (EXPR_HAS_LOCATION (pred)) 3495 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred)); 3496 then_ = shortcut_cond_expr (expr); 3497 then_se = then_ && TREE_SIDE_EFFECTS (then_); 3498 pred = TREE_OPERAND (pred, 0); 3499 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE); 3500 SET_EXPR_LOCATION (expr, locus); 3501 } 3502 } 3503 3504 if (!then_se) 3505 { 3506 /* If there is no 'then', turn 3507 if (a || b); else d 3508 into 3509 if (a); else if (b); else d. */ 3510 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR) 3511 { 3512 /* Keep the original source location on the first 'if'. */ 3513 location_t locus = EXPR_LOC_OR_LOC (expr, input_location); 3514 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1); 3515 /* Set the source location of the || on the second 'if'. */ 3516 if (EXPR_HAS_LOCATION (pred)) 3517 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred)); 3518 else_ = shortcut_cond_expr (expr); 3519 else_se = else_ && TREE_SIDE_EFFECTS (else_); 3520 pred = TREE_OPERAND (pred, 0); 3521 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_); 3522 SET_EXPR_LOCATION (expr, locus); 3523 } 3524 } 3525 3526 /* If we're done, great. */ 3527 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR 3528 && TREE_CODE (pred) != TRUTH_ORIF_EXPR) 3529 return expr; 3530 3531 /* Otherwise we need to mess with gotos. Change 3532 if (a) c; else d; 3533 to 3534 if (a); else goto no; 3535 c; goto end; 3536 no: d; end: 3537 and recursively gimplify the condition. */ 3538 3539 true_label = false_label = end_label = NULL_TREE; 3540 3541 /* If our arms just jump somewhere, hijack those labels so we don't 3542 generate jumps to jumps. */ 3543 3544 if (then_ 3545 && TREE_CODE (then_) == GOTO_EXPR 3546 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL) 3547 { 3548 true_label = GOTO_DESTINATION (then_); 3549 then_ = NULL; 3550 then_se = false; 3551 } 3552 3553 if (else_ 3554 && TREE_CODE (else_) == GOTO_EXPR 3555 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL) 3556 { 3557 false_label = GOTO_DESTINATION (else_); 3558 else_ = NULL; 3559 else_se = false; 3560 } 3561 3562 /* If we aren't hijacking a label for the 'then' branch, it falls through. */ 3563 if (true_label) 3564 true_label_p = &true_label; 3565 else 3566 true_label_p = NULL; 3567 3568 /* The 'else' branch also needs a label if it contains interesting code. */ 3569 if (false_label || else_se) 3570 false_label_p = &false_label; 3571 else 3572 false_label_p = NULL; 3573 3574 /* If there was nothing else in our arms, just forward the label(s). */ 3575 if (!then_se && !else_se) 3576 return shortcut_cond_r (pred, true_label_p, false_label_p, 3577 EXPR_LOC_OR_LOC (expr, input_location)); 3578 3579 /* If our last subexpression already has a terminal label, reuse it. */ 3580 if (else_se) 3581 t = expr_last (else_); 3582 else if (then_se) 3583 t = expr_last (then_); 3584 else 3585 t = NULL; 3586 if (t && TREE_CODE (t) == LABEL_EXPR) 3587 end_label = LABEL_EXPR_LABEL (t); 3588 3589 /* If we don't care about jumping to the 'else' branch, jump to the end 3590 if the condition is false. */ 3591 if (!false_label_p) 3592 false_label_p = &end_label; 3593 3594 /* We only want to emit these labels if we aren't hijacking them. */ 3595 emit_end = (end_label == NULL_TREE); 3596 emit_false = (false_label == NULL_TREE); 3597 3598 /* We only emit the jump over the else clause if we have to--if the 3599 then clause may fall through. 
Otherwise we can wind up with a 3600 useless jump and a useless label at the end of gimplified code, 3601 which will cause us to think that this conditional as a whole 3602 falls through even if it doesn't. If we then inline a function 3603 which ends with such a condition, that can cause us to issue an 3604 inappropriate warning about control reaching the end of a 3605 non-void function. */ 3606 jump_over_else = block_may_fallthru (then_); 3607 3608 pred = shortcut_cond_r (pred, true_label_p, false_label_p, 3609 EXPR_LOC_OR_LOC (expr, input_location)); 3610 3611 expr = NULL; 3612 append_to_statement_list (pred, &expr); 3613 3614 append_to_statement_list (then_, &expr); 3615 if (else_se) 3616 { 3617 if (jump_over_else) 3618 { 3619 tree last = expr_last (expr); 3620 t = build_and_jump (&end_label); 3621 if (EXPR_HAS_LOCATION (last)) 3622 SET_EXPR_LOCATION (t, EXPR_LOCATION (last)); 3623 append_to_statement_list (t, &expr); 3624 } 3625 if (emit_false) 3626 { 3627 t = build1 (LABEL_EXPR, void_type_node, false_label); 3628 append_to_statement_list (t, &expr); 3629 } 3630 append_to_statement_list (else_, &expr); 3631 } 3632 if (emit_end && end_label) 3633 { 3634 t = build1 (LABEL_EXPR, void_type_node, end_label); 3635 append_to_statement_list (t, &expr); 3636 } 3637 3638 return expr; 3639 } 3640 3641 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */ 3643 tree 3644 gimple_boolify (tree expr) 3645 { 3646 tree type = TREE_TYPE (expr); 3647 location_t loc = EXPR_LOCATION (expr); 3648 3649 if (TREE_CODE (expr) == NE_EXPR 3650 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR 3651 && integer_zerop (TREE_OPERAND (expr, 1))) 3652 { 3653 tree call = TREE_OPERAND (expr, 0); 3654 tree fn = get_callee_fndecl (call); 3655 3656 /* For __builtin_expect ((long) (x), y) recurse into x as well 3657 if x is truth_value_p. */ 3658 if (fn 3659 && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL 3660 && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT 3661 && call_expr_nargs (call) == 2) 3662 { 3663 tree arg = CALL_EXPR_ARG (call, 0); 3664 if (arg) 3665 { 3666 if (TREE_CODE (arg) == NOP_EXPR 3667 && TREE_TYPE (arg) == TREE_TYPE (call)) 3668 arg = TREE_OPERAND (arg, 0); 3669 if (truth_value_p (TREE_CODE (arg))) 3670 { 3671 arg = gimple_boolify (arg); 3672 CALL_EXPR_ARG (call, 0) 3673 = fold_convert_loc (loc, TREE_TYPE (call), arg); 3674 } 3675 } 3676 } 3677 } 3678 3679 switch (TREE_CODE (expr)) 3680 { 3681 case TRUTH_AND_EXPR: 3682 case TRUTH_OR_EXPR: 3683 case TRUTH_XOR_EXPR: 3684 case TRUTH_ANDIF_EXPR: 3685 case TRUTH_ORIF_EXPR: 3686 /* Also boolify the arguments of truth exprs. */ 3687 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1)); 3688 /* FALLTHRU */ 3689 3690 case TRUTH_NOT_EXPR: 3691 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0)); 3692 3693 /* These expressions always produce boolean results. */ 3694 if (TREE_CODE (type) != BOOLEAN_TYPE) 3695 TREE_TYPE (expr) = boolean_type_node; 3696 return expr; 3697 3698 case ANNOTATE_EXPR: 3699 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1))) 3700 { 3701 case annot_expr_ivdep_kind: 3702 case annot_expr_no_vector_kind: 3703 case annot_expr_vector_kind: 3704 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0)); 3705 if (TREE_CODE (type) != BOOLEAN_TYPE) 3706 TREE_TYPE (expr) = boolean_type_node; 3707 return expr; 3708 default: 3709 gcc_unreachable (); 3710 } 3711 3712 default: 3713 if (COMPARISON_CLASS_P (expr)) 3714 { 3715 /* These expressions always produce boolean results.
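E.g. in C the FE types a comparison such as a < b as int; here its type is rewritten to boolean_type_node so the middle end sees a genuine boolean.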
*/ 3716 if (TREE_CODE (type) != BOOLEAN_TYPE) 3717 TREE_TYPE (expr) = boolean_type_node; 3718 return expr; 3719 } 3720 /* Other expressions that get here must have boolean values, but 3721 might need to be converted to the appropriate mode. */ 3722 if (TREE_CODE (type) == BOOLEAN_TYPE) 3723 return expr; 3724 return fold_convert_loc (loc, boolean_type_node, expr); 3725 } 3726 } 3727 3728 /* Given a conditional expression *EXPR_P without side effects, gimplify 3729 its operands. New statements are inserted to PRE_P. */ 3730 3731 static enum gimplify_status 3732 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p) 3733 { 3734 tree expr = *expr_p, cond; 3735 enum gimplify_status ret, tret; 3736 enum tree_code code; 3737 3738 cond = gimple_boolify (COND_EXPR_COND (expr)); 3739 3740 /* We need to handle && and || specially, as their gimplification 3741 creates pure cond_expr, thus leading to an infinite cycle otherwise. */ 3742 code = TREE_CODE (cond); 3743 if (code == TRUTH_ANDIF_EXPR) 3744 TREE_SET_CODE (cond, TRUTH_AND_EXPR); 3745 else if (code == TRUTH_ORIF_EXPR) 3746 TREE_SET_CODE (cond, TRUTH_OR_EXPR); 3747 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue); 3748 COND_EXPR_COND (*expr_p) = cond; 3749 3750 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL, 3751 is_gimple_val, fb_rvalue); 3752 ret = MIN (ret, tret); 3753 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL, 3754 is_gimple_val, fb_rvalue); 3755 3756 return MIN (ret, tret); 3757 } 3758 3759 /* Return true if evaluating EXPR could trap. 3760 EXPR is GENERIC, while tree_could_trap_p can be called 3761 only on GIMPLE. */ 3762 3763 static bool 3764 generic_expr_could_trap_p (tree expr) 3765 { 3766 unsigned i, n; 3767 3768 if (!expr || is_gimple_val (expr)) 3769 return false; 3770 3771 if (!EXPR_P (expr) || tree_could_trap_p (expr)) 3772 return true; 3773 3774 n = TREE_OPERAND_LENGTH (expr); 3775 for (i = 0; i < n; i++) 3776 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i))) 3777 return true; 3778 3779 return false; 3780 } 3781 3782 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;' 3783 into 3784 3785 if (p) if (p) 3786 t1 = a; a; 3787 else or else 3788 t1 = b; b; 3789 t1; 3790 3791 The second form is used when *EXPR_P is of type void. 3792 3793 PRE_P points to the list where side effects that must happen before 3794 *EXPR_P should be stored. */ 3795 3796 static enum gimplify_status 3797 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback) 3798 { 3799 tree expr = *expr_p; 3800 tree type = TREE_TYPE (expr); 3801 location_t loc = EXPR_LOCATION (expr); 3802 tree tmp, arm1, arm2; 3803 enum gimplify_status ret; 3804 tree label_true, label_false, label_cont; 3805 bool have_then_clause_p, have_else_clause_p; 3806 gcond *cond_stmt; 3807 enum tree_code pred_code; 3808 gimple_seq seq = NULL; 3809 3810 /* If this COND_EXPR has a value, copy the values into a temporary within 3811 the arms. */ 3812 if (!VOID_TYPE_P (type)) 3813 { 3814 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2); 3815 tree result; 3816 3817 /* If either an rvalue is ok or we do not require an lvalue, create the 3818 temporary. But we cannot do that if the type is addressable. */ 3819 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue)) 3820 && !TREE_ADDRESSABLE (type)) 3821 { 3822 if (gimplify_ctxp->allow_rhs_cond_expr 3823 /* If either branch has side effects or could trap, it can't be 3824 evaluated unconditionally. 
*/ 3825 && !TREE_SIDE_EFFECTS (then_) 3826 && !generic_expr_could_trap_p (then_) 3827 && !TREE_SIDE_EFFECTS (else_) 3828 && !generic_expr_could_trap_p (else_)) 3829 return gimplify_pure_cond_expr (expr_p, pre_p); 3830 3831 tmp = create_tmp_var (type, "iftmp"); 3832 result = tmp; 3833 } 3834 3835 /* Otherwise, only create and copy references to the values. */ 3836 else 3837 { 3838 type = build_pointer_type (type); 3839 3840 if (!VOID_TYPE_P (TREE_TYPE (then_))) 3841 then_ = build_fold_addr_expr_loc (loc, then_); 3842 3843 if (!VOID_TYPE_P (TREE_TYPE (else_))) 3844 else_ = build_fold_addr_expr_loc (loc, else_); 3845 3846 expr 3847 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_); 3848 3849 tmp = create_tmp_var (type, "iftmp"); 3850 result = build_simple_mem_ref_loc (loc, tmp); 3851 } 3852 3853 /* Build the new then clause, `tmp = then_;'. But don't build the 3854 assignment if the value is void; in C++ it can be if it's a throw. */ 3855 if (!VOID_TYPE_P (TREE_TYPE (then_))) 3856 TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_); 3857 3858 /* Similarly, build the new else clause, `tmp = else_;'. */ 3859 if (!VOID_TYPE_P (TREE_TYPE (else_))) 3860 TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_); 3861 3862 TREE_TYPE (expr) = void_type_node; 3863 recalculate_side_effects (expr); 3864 3865 /* Move the COND_EXPR to the prequeue. */ 3866 gimplify_stmt (&expr, pre_p); 3867 3868 *expr_p = result; 3869 return GS_ALL_DONE; 3870 } 3871 3872 /* Remove any COMPOUND_EXPR so the following cases will be caught. */ 3873 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0)); 3874 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR) 3875 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true); 3876 3877 /* Make sure the condition has BOOLEAN_TYPE. */ 3878 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0)); 3879 3880 /* Break apart && and || conditions. */ 3881 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR 3882 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR) 3883 { 3884 expr = shortcut_cond_expr (expr); 3885 3886 if (expr != *expr_p) 3887 { 3888 *expr_p = expr; 3889 3890 /* We can't rely on gimplify_expr to re-gimplify the expanded 3891 form properly, as cleanups might cause the target labels to be 3892 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to 3893 set up a conditional context. */ 3894 gimple_push_condition (); 3895 gimplify_stmt (expr_p, &seq); 3896 gimple_pop_condition (pre_p); 3897 gimple_seq_add_seq (pre_p, seq); 3898 3899 return GS_ALL_DONE; 3900 } 3901 } 3902 3903 /* Now do the normal gimplification. */ 3904 3905 /* Gimplify condition. */ 3906 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr, 3907 fb_rvalue); 3908 if (ret == GS_ERROR) 3909 return GS_ERROR; 3910 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE); 3911 3912 gimple_push_condition (); 3913 3914 have_then_clause_p = have_else_clause_p = false; 3915 if (TREE_OPERAND (expr, 1) != NULL 3916 && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR 3917 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL 3918 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) 3919 == current_function_decl) 3920 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR 3921 have different locations, otherwise we end up with incorrect 3922 location information on the branches. 
*/ 3923 && (optimize 3924 || !EXPR_HAS_LOCATION (expr) 3925 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1)) 3926 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1)))) 3927 { 3928 label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1)); 3929 have_then_clause_p = true; 3930 } 3931 else 3932 label_true = create_artificial_label (UNKNOWN_LOCATION); 3933 if (TREE_OPERAND (expr, 2) != NULL 3934 && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR 3935 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL 3936 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) 3937 == current_function_decl) 3938 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR 3939 have different locations, otherwise we end up with incorrect 3940 location information on the branches. */ 3941 && (optimize 3942 || !EXPR_HAS_LOCATION (expr) 3943 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2)) 3944 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2)))) 3945 { 3946 label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2)); 3947 have_else_clause_p = true; 3948 } 3949 else 3950 label_false = create_artificial_label (UNKNOWN_LOCATION); 3951 3952 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1, 3953 &arm2); 3954 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, 3955 label_false); 3956 gimple_set_no_warning (cond_stmt, TREE_NO_WARNING (COND_EXPR_COND (expr))); 3957 gimplify_seq_add_stmt (&seq, cond_stmt); 3958 gimple_stmt_iterator gsi = gsi_last (seq); 3959 maybe_fold_stmt (&gsi); 3960 3961 label_cont = NULL_TREE; 3962 if (!have_then_clause_p) 3963 { 3964 /* For if (...) {} else { code; } put label_true after 3965 the else block. */ 3966 if (TREE_OPERAND (expr, 1) == NULL_TREE 3967 && !have_else_clause_p 3968 && TREE_OPERAND (expr, 2) != NULL_TREE) 3969 label_cont = label_true; 3970 else 3971 { 3972 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true)); 3973 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq); 3974 /* For if (...) { code; } else {} or 3975 if (...) { code; } else goto label; or 3976 if (...) { code; return; } else { ... } 3977 label_cont isn't needed. */ 3978 if (!have_else_clause_p 3979 && TREE_OPERAND (expr, 2) != NULL_TREE 3980 && gimple_seq_may_fallthru (seq)) 3981 { 3982 gimple *g; 3983 label_cont = create_artificial_label (UNKNOWN_LOCATION); 3984 3985 g = gimple_build_goto (label_cont); 3986 3987 /* GIMPLE_COND's are very low level; they have embedded 3988 gotos. This particular embedded goto should not be marked 3989 with the location of the original COND_EXPR, as it would 3990 correspond to the COND_EXPR's condition, not the ELSE or the 3991 THEN arms. To avoid marking it with the wrong location, flag 3992 it as "no location". */ 3993 gimple_set_do_not_emit_location (g); 3994 3995 gimplify_seq_add_stmt (&seq, g); 3996 } 3997 } 3998 } 3999 if (!have_else_clause_p) 4000 { 4001 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false)); 4002 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq); 4003 } 4004 if (label_cont) 4005 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont)); 4006 4007 gimple_pop_condition (pre_p); 4008 gimple_seq_add_seq (pre_p, seq); 4009 4010 if (ret == GS_ERROR) 4011 ; /* Do nothing. */ 4012 else if (have_then_clause_p || have_else_clause_p) 4013 ret = GS_ALL_DONE; 4014 else 4015 { 4016 /* Both arms are empty; replace the COND_EXPR with its predicate. 
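For example (a sketch), a statement like

  if (f ()) ; else ;

reduces to plain 'f ();', keeping any side effects of the predicate.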
*/ 4017 expr = TREE_OPERAND (expr, 0); 4018 gimplify_stmt (&expr, pre_p); 4019 } 4020 4021 *expr_p = NULL; 4022 return ret; 4023 } 4024 4025 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression, 4026 to be marked addressable. 4027 4028 We cannot rely on such an expression being directly markable if a temporary 4029 has been created by the gimplification. In this case, we create another 4030 temporary and initialize it with a copy, which will become a store after we 4031 mark it addressable. This can happen if the front-end passed us something 4032 that it could not mark addressable yet, like a Fortran pass-by-reference 4033 parameter (int) floatvar. */ 4034 4035 static void 4036 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p) 4037 { 4038 while (handled_component_p (*expr_p)) 4039 expr_p = &TREE_OPERAND (*expr_p, 0); 4040 if (is_gimple_reg (*expr_p)) 4041 { 4042 /* Do not allow an SSA name as the temporary. */ 4043 tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL, false); 4044 DECL_GIMPLE_REG_P (var) = 0; 4045 *expr_p = var; 4046 } 4047 } 4048 4049 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with 4050 a call to __builtin_memcpy. */ 4051 4052 static enum gimplify_status 4053 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value, 4054 gimple_seq *seq_p) 4055 { 4056 tree t, to, to_ptr, from, from_ptr; 4057 gcall *gs; 4058 location_t loc = EXPR_LOCATION (*expr_p); 4059 4060 to = TREE_OPERAND (*expr_p, 0); 4061 from = TREE_OPERAND (*expr_p, 1); 4062 4063 /* Mark the RHS addressable. Beware that it may not be possible to do so 4064 directly if a temporary has been created by the gimplification. */ 4065 prepare_gimple_addressable (&from, seq_p); 4066 4067 mark_addressable (from); 4068 from_ptr = build_fold_addr_expr_loc (loc, from); 4069 gimplify_arg (&from_ptr, seq_p, loc); 4070 4071 mark_addressable (to); 4072 to_ptr = build_fold_addr_expr_loc (loc, to); 4073 gimplify_arg (&to_ptr, seq_p, loc); 4074 4075 t = builtin_decl_implicit (BUILT_IN_MEMCPY); 4076 4077 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size); 4078 4079 if (want_value) 4080 { 4081 /* tmp = memcpy() */ 4082 t = create_tmp_var (TREE_TYPE (to_ptr)); 4083 gimple_call_set_lhs (gs, t); 4084 gimplify_seq_add_stmt (seq_p, gs); 4085 4086 *expr_p = build_simple_mem_ref (t); 4087 return GS_ALL_DONE; 4088 } 4089 4090 gimplify_seq_add_stmt (seq_p, gs); 4091 *expr_p = NULL; 4092 return GS_ALL_DONE; 4093 } 4094 4095 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with 4096 a call to __builtin_memset. In this case we know that the RHS is 4097 a CONSTRUCTOR with an empty element list. */ 4098 4099 static enum gimplify_status 4100 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value, 4101 gimple_seq *seq_p) 4102 { 4103 tree t, from, to, to_ptr; 4104 gcall *gs; 4105 location_t loc = EXPR_LOCATION (*expr_p); 4106 4107 /* Assert our assumptions, to abort instead of producing wrong code 4108 silently if they are not met. Beware that the RHS CONSTRUCTOR might 4109 not be immediately exposed. */ 4110 from = TREE_OPERAND (*expr_p, 1); 4111 if (TREE_CODE (from) == WITH_SIZE_EXPR) 4112 from = TREE_OPERAND (from, 0); 4113 4114 gcc_assert (TREE_CODE (from) == CONSTRUCTOR 4115 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from))); 4116 4117 /* Now proceed. 
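The net effect is roughly the following (a sketch; the struct name and
SIZE are stand-ins, SIZE having been computed by our caller):

  s = (struct S) { };   ==>   __builtin_memset (&s, 0, SIZE);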
*/ 4118 to = TREE_OPERAND (*expr_p, 0); 4119 4120 to_ptr = build_fold_addr_expr_loc (loc, to); 4121 gimplify_arg (&to_ptr, seq_p, loc); 4122 t = builtin_decl_implicit (BUILT_IN_MEMSET); 4123 4124 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size); 4125 4126 if (want_value) 4127 { 4128 /* tmp = memset() */ 4129 t = create_tmp_var (TREE_TYPE (to_ptr)); 4130 gimple_call_set_lhs (gs, t); 4131 gimplify_seq_add_stmt (seq_p, gs); 4132 4133 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t); 4134 return GS_ALL_DONE; 4135 } 4136 4137 gimplify_seq_add_stmt (seq_p, gs); 4138 *expr_p = NULL; 4139 return GS_ALL_DONE; 4140 } 4141 4142 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree, 4143 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an 4144 assignment. Return non-null if we detect a potential overlap. */ 4145 4146 struct gimplify_init_ctor_preeval_data 4147 { 4148 /* The base decl of the lhs object. May be NULL, in which case we 4149 have to assume the lhs is indirect. */ 4150 tree lhs_base_decl; 4151 4152 /* The alias set of the lhs object. */ 4153 alias_set_type lhs_alias_set; 4154 }; 4155 4156 static tree 4157 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata) 4158 { 4159 struct gimplify_init_ctor_preeval_data *data 4160 = (struct gimplify_init_ctor_preeval_data *) xdata; 4161 tree t = *tp; 4162 4163 /* If we find the base object, obviously we have overlap. */ 4164 if (data->lhs_base_decl == t) 4165 return t; 4166 4167 /* If the constructor component is indirect, determine if we have a 4168 potential overlap with the lhs. The only bits of information we 4169 have to go on at this point are addressability and alias sets. */ 4170 if ((INDIRECT_REF_P (t) 4171 || TREE_CODE (t) == MEM_REF) 4172 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl)) 4173 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t))) 4174 return t; 4175 4176 /* If the constructor component is a call, determine if it can hide a 4177 potential overlap with the lhs through an INDIRECT_REF like above. 4178 ??? Ugh - this is completely broken. In fact this whole analysis 4179 doesn't look conservative. */ 4180 if (TREE_CODE (t) == CALL_EXPR) 4181 { 4182 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t))); 4183 4184 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type)) 4185 if (POINTER_TYPE_P (TREE_VALUE (type)) 4186 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl)) 4187 && alias_sets_conflict_p (data->lhs_alias_set, 4188 get_alias_set 4189 (TREE_TYPE (TREE_VALUE (type))))) 4190 return t; 4191 } 4192 4193 if (IS_TYPE_OR_DECL_P (t)) 4194 *walk_subtrees = 0; 4195 return NULL; 4196 } 4197 4198 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR, 4199 force values that overlap with the lhs (as described by *DATA) 4200 into temporaries. */ 4201 4202 static void 4203 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p, 4204 struct gimplify_init_ctor_preeval_data *data) 4205 { 4206 enum gimplify_status one; 4207 4208 /* If the value is constant, then there's nothing to pre-evaluate. */ 4209 if (TREE_CONSTANT (*expr_p)) 4210 { 4211 /* Ensure it does not have side effects, it might contain a reference to 4212 the object we're initializing. */ 4213 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p)); 4214 return; 4215 } 4216 4217 /* If the type has non-trivial constructors, we can't pre-evaluate. 
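(TREE_ADDRESSABLE on the type is how, e.g., the C++ front end marks
types like 'struct T { T (const T &); };' that must not be copied
bitwise, so forcing such a value into a bitwise temporary here would
be wrong.)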
*/ 4218 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p))) 4219 return; 4220 4221 /* Recurse for nested constructors. */ 4222 if (TREE_CODE (*expr_p) == CONSTRUCTOR) 4223 { 4224 unsigned HOST_WIDE_INT ix; 4225 constructor_elt *ce; 4226 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p); 4227 4228 FOR_EACH_VEC_SAFE_ELT (v, ix, ce) 4229 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data); 4230 4231 return; 4232 } 4233 4234 /* If this is a variable sized type, we must remember the size. */ 4235 maybe_with_size_expr (expr_p); 4236 4237 /* Gimplify the constructor element to something appropriate for the rhs 4238 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know 4239 the gimplifier will consider this a store to memory. Doing this 4240 gimplification now means that we won't have to deal with complicated 4241 language-specific trees, nor trees like SAVE_EXPR that can induce 4242 exponential search behavior. */ 4243 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue); 4244 if (one == GS_ERROR) 4245 { 4246 *expr_p = NULL; 4247 return; 4248 } 4249 4250 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap 4251 with the lhs, since "a = { .x=a }" doesn't make sense. This will 4252 always be true for all scalars, since is_gimple_mem_rhs insists on a 4253 temporary variable for them. */ 4254 if (DECL_P (*expr_p)) 4255 return; 4256 4257 /* If this is of variable size, we have no choice but to assume it doesn't 4258 overlap since we can't make a temporary for it. */ 4259 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST) 4260 return; 4261 4262 /* Otherwise, we must search for overlap ... */ 4263 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL)) 4264 return; 4265 4266 /* ... and if found, force the value into a temporary. */ 4267 *expr_p = get_formal_tmp_var (*expr_p, pre_p); 4268 } 4269 4270 /* A subroutine of gimplify_init_ctor_eval. Create a loop for 4271 a RANGE_EXPR in a CONSTRUCTOR for an array. 4272 4273 var = lower; 4274 loop_entry: 4275 object[var] = value; 4276 if (var == upper) 4277 goto loop_exit; 4278 var = var + 1; 4279 goto loop_entry; 4280 loop_exit: 4281 4282 We increment var _after_ the loop exit check because we might otherwise 4283 fail if upper == TYPE_MAX_VALUE (type for upper). 4284 4285 Note that we never have to deal with SAVE_EXPRs here, because this has 4286 already been taken care of for us, in gimplify_init_ctor_preeval(). */ 4287 4288 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *, 4289 gimple_seq *, bool); 4290 4291 static void 4292 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper, 4293 tree value, tree array_elt_type, 4294 gimple_seq *pre_p, bool cleared) 4295 { 4296 tree loop_entry_label, loop_exit_label, fall_thru_label; 4297 tree var, var_type, cref, tmp; 4298 4299 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION); 4300 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION); 4301 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION); 4302 4303 /* Create and initialize the index variable. */ 4304 var_type = TREE_TYPE (upper); 4305 var = create_tmp_var (var_type); 4306 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower)); 4307 4308 /* Add the loop entry label. */ 4309 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label)); 4310 4311 /* Build the reference. 
*/ 4312 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object), 4313 var, NULL_TREE, NULL_TREE); 4314 4315 /* If we are a constructor, just call gimplify_init_ctor_eval to do 4316 the store. Otherwise just assign value to the reference. */ 4317 4318 if (TREE_CODE (value) == CONSTRUCTOR) 4319 /* NB we might have to call ourself recursively through 4320 gimplify_init_ctor_eval if the value is a constructor. */ 4321 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value), 4322 pre_p, cleared); 4323 else 4324 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value)); 4325 4326 /* We exit the loop when the index var is equal to the upper bound. */ 4327 gimplify_seq_add_stmt (pre_p, 4328 gimple_build_cond (EQ_EXPR, var, upper, 4329 loop_exit_label, fall_thru_label)); 4330 4331 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label)); 4332 4333 /* Otherwise, increment the index var... */ 4334 tmp = build2 (PLUS_EXPR, var_type, var, 4335 fold_convert (var_type, integer_one_node)); 4336 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp)); 4337 4338 /* ...and jump back to the loop entry. */ 4339 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label)); 4340 4341 /* Add the loop exit label. */ 4342 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label)); 4343 } 4344 4345 /* Return true if FDECL is accessing a field that is zero sized. */ 4346 4347 static bool 4348 zero_sized_field_decl (const_tree fdecl) 4349 { 4350 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl) 4351 && integer_zerop (DECL_SIZE (fdecl))) 4352 return true; 4353 return false; 4354 } 4355 4356 /* Return true if TYPE is zero sized. */ 4357 4358 static bool 4359 zero_sized_type (const_tree type) 4360 { 4361 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type) 4362 && integer_zerop (TYPE_SIZE (type))) 4363 return true; 4364 return false; 4365 } 4366 4367 /* A subroutine of gimplify_init_constructor. Generate individual 4368 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the 4369 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the 4370 CONSTRUCTOR. CLEARED is true if the entire LHS object has been 4371 zeroed first. */ 4372 4373 static void 4374 gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts, 4375 gimple_seq *pre_p, bool cleared) 4376 { 4377 tree array_elt_type = NULL; 4378 unsigned HOST_WIDE_INT ix; 4379 tree purpose, value; 4380 4381 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE) 4382 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object))); 4383 4384 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value) 4385 { 4386 tree cref; 4387 4388 /* NULL values are created above for gimplification errors. */ 4389 if (value == NULL) 4390 continue; 4391 4392 if (cleared && initializer_zerop (value)) 4393 continue; 4394 4395 /* ??? Here's to hoping the front end fills in all of the indices, 4396 so we don't have to figure out what's missing ourselves. */ 4397 gcc_assert (purpose); 4398 4399 /* Skip zero-sized fields, unless value has side-effects. This can 4400 happen with calls to functions returning a zero-sized type, which 4401 we shouldn't discard. As a number of downstream passes don't 4402 expect sets of zero-sized fields, we rely on the gimplification of 4403 the MODIFY_EXPR we make below to drop the assignment statement. */ 4404 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose)) 4405 continue; 4406 4407 /* If we have a RANGE_EXPR, we have to build a loop to assign the 4408 whole range. 
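Such a RANGE_EXPR typically comes from a GNU designated-range
initializer, e.g. (a sketch)

  int a[10] = { [2 ... 5] = v };

which gimplify_init_ctor_eval_range expands into the index loop
sketched above its definition.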
*/ 4409 if (TREE_CODE (purpose) == RANGE_EXPR) 4410 { 4411 tree lower = TREE_OPERAND (purpose, 0); 4412 tree upper = TREE_OPERAND (purpose, 1); 4413 4414 /* If the lower bound is equal to upper, just treat it as if 4415 upper were the index. */ 4416 if (simple_cst_equal (lower, upper)) 4417 purpose = upper; 4418 else 4419 { 4420 gimplify_init_ctor_eval_range (object, lower, upper, value, 4421 array_elt_type, pre_p, cleared); 4422 continue; 4423 } 4424 } 4425 4426 if (array_elt_type) 4427 { 4428 /* Do not use bitsizetype for ARRAY_REF indices. */ 4429 if (TYPE_DOMAIN (TREE_TYPE (object))) 4430 purpose 4431 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))), 4432 purpose); 4433 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object), 4434 purpose, NULL_TREE, NULL_TREE); 4435 } 4436 else 4437 { 4438 gcc_assert (TREE_CODE (purpose) == FIELD_DECL); 4439 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose), 4440 unshare_expr (object), purpose, NULL_TREE); 4441 } 4442 4443 if (TREE_CODE (value) == CONSTRUCTOR 4444 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE) 4445 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value), 4446 pre_p, cleared); 4447 else 4448 { 4449 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value); 4450 gimplify_and_add (init, pre_p); 4451 ggc_free (init); 4452 } 4453 } 4454 } 4455 4456 /* Return the appropriate RHS predicate for this LHS. */ 4457 4458 gimple_predicate 4459 rhs_predicate_for (tree lhs) 4460 { 4461 if (is_gimple_reg (lhs)) 4462 return is_gimple_reg_rhs_or_call; 4463 else 4464 return is_gimple_mem_rhs_or_call; 4465 } 4466 4467 /* Return the initial guess for an appropriate RHS predicate for this LHS, 4468 before the LHS has been gimplified. */ 4469 4470 static gimple_predicate 4471 initial_rhs_predicate_for (tree lhs) 4472 { 4473 if (is_gimple_reg_type (TREE_TYPE (lhs))) 4474 return is_gimple_reg_rhs_or_call; 4475 else 4476 return is_gimple_mem_rhs_or_call; 4477 } 4478 4479 /* Gimplify a C99 compound literal expression. This just means adding 4480 the DECL_EXPR before the current statement and using its anonymous 4481 decl instead. */ 4482 4483 static enum gimplify_status 4484 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p, 4485 bool (*gimple_test_f) (tree), 4486 fallback_t fallback) 4487 { 4488 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p); 4489 tree decl = DECL_EXPR_DECL (decl_s); 4490 tree init = DECL_INITIAL (decl); 4491 /* Mark the decl as addressable if the compound literal 4492 expression is addressable now; otherwise it is marked too late 4493 after we gimplify the initialization expression. */ 4494 if (TREE_ADDRESSABLE (*expr_p)) 4495 TREE_ADDRESSABLE (decl) = 1; 4496 /* Otherwise, if we don't need an lvalue and have a literal, directly 4497 substitute it. Check if it matches the gimple predicate, as 4498 otherwise we'd generate a new temporary, and we may as well just 4499 use the decl we already have. */ 4500 else if (!TREE_ADDRESSABLE (decl) 4501 && init 4502 && (fallback & fb_lvalue) == 0 4503 && gimple_test_f (init)) 4504 { 4505 *expr_p = init; 4506 return GS_OK; 4507 } 4508 4509 /* Preliminarily mark non-addressed complex variables as eligible 4510 for promotion to gimple registers. We'll transform their uses 4511 as we find them.
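E.g. (a sketch) the anonymous decl backing '(_Complex double) { r, i }'
gets DECL_GIMPLE_REG_P here, so later partial stores to its real or
imaginary part can be turned into total stores by
gimplify_modify_expr_complex_part below.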
*/ 4512 if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE 4513 || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE) 4514 && !TREE_THIS_VOLATILE (decl) 4515 && !needs_to_live_in_memory (decl)) 4516 DECL_GIMPLE_REG_P (decl) = 1; 4517 4518 /* If the decl is not addressable, then it is being used in some 4519 expression or on the right hand side of a statement, and it can 4520 be put into a readonly data section. */ 4521 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0) 4522 TREE_READONLY (decl) = 1; 4523 4524 /* This decl isn't mentioned in the enclosing block, so add it to the 4525 list of temps. FIXME it seems a bit of a kludge to say that 4526 anonymous artificial vars aren't pushed, but everything else is. */ 4527 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl)) 4528 gimple_add_tmp_var (decl); 4529 4530 gimplify_and_add (decl_s, pre_p); 4531 *expr_p = decl; 4532 return GS_OK; 4533 } 4534 4535 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR, 4536 return a new CONSTRUCTOR if something changed. */ 4537 4538 static tree 4539 optimize_compound_literals_in_ctor (tree orig_ctor) 4540 { 4541 tree ctor = orig_ctor; 4542 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor); 4543 unsigned int idx, num = vec_safe_length (elts); 4544 4545 for (idx = 0; idx < num; idx++) 4546 { 4547 tree value = (*elts)[idx].value; 4548 tree newval = value; 4549 if (TREE_CODE (value) == CONSTRUCTOR) 4550 newval = optimize_compound_literals_in_ctor (value); 4551 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR) 4552 { 4553 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value); 4554 tree decl = DECL_EXPR_DECL (decl_s); 4555 tree init = DECL_INITIAL (decl); 4556 4557 if (!TREE_ADDRESSABLE (value) 4558 && !TREE_ADDRESSABLE (decl) 4559 && init 4560 && TREE_CODE (init) == CONSTRUCTOR) 4561 newval = optimize_compound_literals_in_ctor (init); 4562 } 4563 if (newval == value) 4564 continue; 4565 4566 if (ctor == orig_ctor) 4567 { 4568 ctor = copy_node (orig_ctor); 4569 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts); 4570 elts = CONSTRUCTOR_ELTS (ctor); 4571 } 4572 (*elts)[idx].value = newval; 4573 } 4574 return ctor; 4575 } 4576 4577 /* A subroutine of gimplify_modify_expr. Break out elements of a 4578 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs. 4579 4580 Note that we still need to clear any elements that don't have explicit 4581 initializers, so if not all elements are initialized we keep the 4582 original MODIFY_EXPR, we just remove all of the constructor elements. 4583 4584 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return 4585 GS_ERROR if we would have to create a temporary when gimplifying 4586 this constructor. Otherwise, return GS_OK. 4587 4588 If NOTIFY_TEMP_CREATION is false, just do the gimplification. 
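As a rough example of the overall effect (names invented):

  struct S { int a, b; } s = { 1, f () };
    ==>
  s.a = 1;
  s.b = f ();

possibly preceded by a block clear of 's' when the initializer does
not cover every field.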
*/ 4589 4590 static enum gimplify_status 4591 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p, 4592 bool want_value, bool notify_temp_creation) 4593 { 4594 tree object, ctor, type; 4595 enum gimplify_status ret; 4596 vec<constructor_elt, va_gc> *elts; 4597 4598 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR); 4599 4600 if (!notify_temp_creation) 4601 { 4602 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p, 4603 is_gimple_lvalue, fb_lvalue); 4604 if (ret == GS_ERROR) 4605 return ret; 4606 } 4607 4608 object = TREE_OPERAND (*expr_p, 0); 4609 ctor = TREE_OPERAND (*expr_p, 1) 4610 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1)); 4611 type = TREE_TYPE (ctor); 4612 elts = CONSTRUCTOR_ELTS (ctor); 4613 ret = GS_ALL_DONE; 4614 4615 switch (TREE_CODE (type)) 4616 { 4617 case RECORD_TYPE: 4618 case UNION_TYPE: 4619 case QUAL_UNION_TYPE: 4620 case ARRAY_TYPE: 4621 { 4622 struct gimplify_init_ctor_preeval_data preeval_data; 4623 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements; 4624 bool cleared, complete_p, valid_const_initializer; 4625 4626 /* Aggregate types must lower constructors to initialization of 4627 individual elements. The exception is that a CONSTRUCTOR node 4628 with no elements indicates zero-initialization of the whole. */ 4629 if (vec_safe_is_empty (elts)) 4630 { 4631 if (notify_temp_creation) 4632 return GS_OK; 4633 break; 4634 } 4635 4636 /* Fetch information about the constructor to direct later processing. 4637 We might want to make static versions of it in various cases, and 4638 can only do so if it is known to be a valid constant initializer. */ 4639 valid_const_initializer 4640 = categorize_ctor_elements (ctor, &num_nonzero_elements, 4641 &num_ctor_elements, &complete_p); 4642 4643 /* If a const aggregate variable is being initialized, then it 4644 should never be a loss to promote the variable to be static. */ 4645 if (valid_const_initializer 4646 && num_nonzero_elements > 1 4647 && TREE_READONLY (object) 4648 && VAR_P (object) 4649 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object))) 4650 { 4651 if (notify_temp_creation) 4652 return GS_ERROR; 4653 DECL_INITIAL (object) = ctor; 4654 TREE_STATIC (object) = 1; 4655 if (!DECL_NAME (object)) 4656 DECL_NAME (object) = create_tmp_var_name ("C"); 4657 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL); 4658 4659 /* ??? C++ doesn't automatically append a .<number> to the 4660 assembler name, and even when it does, it looks at FE private 4661 data structures to figure out what that number should be, 4662 which are not set for this variable. I suppose this is 4663 important for local statics for inline functions, which aren't 4664 "local" in the object file sense. So in order to get a unique 4665 TU-local symbol, we must invoke the lhd version now. */ 4666 lhd_set_decl_assembler_name (object); 4667 4668 *expr_p = NULL_TREE; 4669 break; 4670 } 4671 4672 /* If there are "lots" of initialized elements, even discounting 4673 those that are not address constants (and thus *must* be 4674 computed at runtime), then partition the constructor into 4675 constant and non-constant parts. Block copy the constant 4676 parts in, then generate code for the non-constant parts. */ 4677 /* TODO. There's code in cp/typeck.c to do this. */ 4678 4679 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0) 4680 /* store_constructor will ignore the clearing of variable-sized 4681 objects.
Initializers for such objects must explicitly set 4682 every field that needs to be set. */ 4683 cleared = false; 4684 else if (!complete_p) 4685 /* If the constructor isn't complete, clear the whole object 4686 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it. 4687 4688 ??? This ought not to be needed. For any element not present 4689 in the initializer, we should simply set it to zero. Except 4690 we'd need to *find* the elements that are not present, and that 4691 requires trickery to avoid quadratic compile-time behavior in 4692 large cases or excessive memory use in small cases. */ 4693 cleared = !CONSTRUCTOR_NO_CLEARING (ctor); 4694 else if (num_ctor_elements - num_nonzero_elements 4695 > CLEAR_RATIO (optimize_function_for_speed_p (cfun)) 4696 && num_nonzero_elements < num_ctor_elements / 4) 4697 /* If there are "lots" of zeros, it's more efficient to clear 4698 the memory and then set the nonzero elements. */ 4699 cleared = true; 4700 else 4701 cleared = false; 4702 4703 /* If there are "lots" of initialized elements, and all of them 4704 are valid address constants, then the entire initializer can 4705 be dropped to memory, and then memcpy'd out. Don't do this 4706 for sparse arrays, though, as it's more efficient to follow 4707 the standard CONSTRUCTOR behavior of memset followed by 4708 individual element initialization. Also don't do this for small 4709 all-zero initializers (which aren't big enough to merit 4710 clearing), and don't try to make bitwise copies of 4711 TREE_ADDRESSABLE types. 4712 4713 We cannot apply such a transformation when compiling a chkp static 4714 initializer because creating the initializer image in memory 4715 would require static initialization of its bounds. That would 4716 trigger another gimplification of a similar initializer, and we 4717 could fall into an infinite loop. */ 4718 if (valid_const_initializer 4719 && !(cleared || num_nonzero_elements == 0) 4720 && !TREE_ADDRESSABLE (type) 4721 && (!current_function_decl 4722 || !lookup_attribute ("chkp ctor", 4723 DECL_ATTRIBUTES (current_function_decl)))) 4724 { 4725 HOST_WIDE_INT size = int_size_in_bytes (type); 4726 unsigned int align; 4727 4728 /* ??? We can still get unbounded array types, at least 4729 from the C++ front end. This seems wrong, but attempt 4730 to work around it for now. */ 4731 if (size < 0) 4732 { 4733 size = int_size_in_bytes (TREE_TYPE (object)); 4734 if (size >= 0) 4735 TREE_TYPE (ctor) = type = TREE_TYPE (object); 4736 } 4737 4738 /* Find the maximum alignment we can assume for the object. */ 4739 /* ??? Make use of DECL_OFFSET_ALIGN. */ 4740 if (DECL_P (object)) 4741 align = DECL_ALIGN (object); 4742 else 4743 align = TYPE_ALIGN (type); 4744 4745 /* Do a block move either if the size is so small as to make 4746 each individual move a sub-unit move on average, or if it 4747 is so large as to make individual moves inefficient. */ 4748 if (size > 0 4749 && num_nonzero_elements > 1 4750 && (size < num_nonzero_elements 4751 || !can_move_by_pieces (size, align))) 4752 { 4753 if (notify_temp_creation) 4754 return GS_ERROR; 4755 4756 walk_tree (&ctor, force_labels_r, NULL, NULL); 4757 ctor = tree_output_constant_def (ctor); 4758 if (!useless_type_conversion_p (type, TREE_TYPE (ctor))) 4759 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor); 4760 TREE_OPERAND (*expr_p, 1) = ctor; 4761 4762 /* This is no longer an assignment of a CONSTRUCTOR, but 4763 we still may have processing to do on the LHS. So 4764 pretend we didn't do anything here to let that happen.
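At this point the RHS has been turned into a reference to a static
constant; roughly (a sketch, '.LC0' being a made-up label):

  s = { <large constant ctor> };   ==>   s = .LC0;

and the normal MODIFY_EXPR path will emit the block copy.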
*/ 4765 return GS_UNHANDLED; 4766 } 4767 } 4768 4769 /* If the target is volatile, we have non-zero elements and more than 4770 one field to assign, initialize the target from a temporary. */ 4771 if (TREE_THIS_VOLATILE (object) 4772 && !TREE_ADDRESSABLE (type) 4773 && num_nonzero_elements > 0 4774 && vec_safe_length (elts) > 1) 4775 { 4776 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type)); 4777 TREE_OPERAND (*expr_p, 0) = temp; 4778 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p), 4779 *expr_p, 4780 build2 (MODIFY_EXPR, void_type_node, 4781 object, temp)); 4782 return GS_OK; 4783 } 4784 4785 if (notify_temp_creation) 4786 return GS_OK; 4787 4788 /* If there are nonzero elements and if needed, pre-evaluate to capture 4789 elements overlapping with the lhs into temporaries. We must do this 4790 before clearing to fetch the values before they are zeroed-out. */ 4791 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR) 4792 { 4793 preeval_data.lhs_base_decl = get_base_address (object); 4794 if (!DECL_P (preeval_data.lhs_base_decl)) 4795 preeval_data.lhs_base_decl = NULL; 4796 preeval_data.lhs_alias_set = get_alias_set (object); 4797 4798 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1), 4799 pre_p, post_p, &preeval_data); 4800 } 4801 4802 bool ctor_has_side_effects_p 4803 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1)); 4804 4805 if (cleared) 4806 { 4807 /* Zap the CONSTRUCTOR element list, which simplifies this case. 4808 Note that we still have to gimplify, in order to handle the 4809 case of variable sized types. Avoid shared tree structures. */ 4810 CONSTRUCTOR_ELTS (ctor) = NULL; 4811 TREE_SIDE_EFFECTS (ctor) = 0; 4812 object = unshare_expr (object); 4813 gimplify_stmt (expr_p, pre_p); 4814 } 4815 4816 /* If we have not block cleared the object, or if there are nonzero 4817 elements in the constructor, or if the constructor has side effects, 4818 add assignments to the individual scalar fields of the object. */ 4819 if (!cleared 4820 || num_nonzero_elements > 0 4821 || ctor_has_side_effects_p) 4822 gimplify_init_ctor_eval (object, elts, pre_p, cleared); 4823 4824 *expr_p = NULL_TREE; 4825 } 4826 break; 4827 4828 case COMPLEX_TYPE: 4829 { 4830 tree r, i; 4831 4832 if (notify_temp_creation) 4833 return GS_OK; 4834 4835 /* Extract the real and imaginary parts out of the ctor. */ 4836 gcc_assert (elts->length () == 2); 4837 r = (*elts)[0].value; 4838 i = (*elts)[1].value; 4839 if (r == NULL || i == NULL) 4840 { 4841 tree zero = build_zero_cst (TREE_TYPE (type)); 4842 if (r == NULL) 4843 r = zero; 4844 if (i == NULL) 4845 i = zero; 4846 } 4847 4848 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to 4849 represent creation of a complex value. */ 4850 if (TREE_CONSTANT (r) && TREE_CONSTANT (i)) 4851 { 4852 ctor = build_complex (type, r, i); 4853 TREE_OPERAND (*expr_p, 1) = ctor; 4854 } 4855 else 4856 { 4857 ctor = build2 (COMPLEX_EXPR, type, r, i); 4858 TREE_OPERAND (*expr_p, 1) = ctor; 4859 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1), 4860 pre_p, 4861 post_p, 4862 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)), 4863 fb_rvalue); 4864 } 4865 } 4866 break; 4867 4868 case VECTOR_TYPE: 4869 { 4870 unsigned HOST_WIDE_INT ix; 4871 constructor_elt *ce; 4872 4873 if (notify_temp_creation) 4874 return GS_OK; 4875 4876 /* Go ahead and simplify constant constructors to VECTOR_CST. 
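E.g. (a sketch, for a four-element integer vector type):

  CONSTRUCTOR { 1, 2, 3, 4 }   ==>   VECTOR_CST <1, 2, 3, 4>

provided every element really is a *_CST, as verified below.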
*/ 4877 if (TREE_CONSTANT (ctor)) 4878 { 4879 bool constant_p = true; 4880 tree value; 4881 4882 /* Even when ctor is constant, it might contain non-*_CST 4883 elements, such as addresses or trapping values like 4884 1.0/0.0 - 1.0/0.0. Such expressions don't belong 4885 in VECTOR_CST nodes. */ 4886 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value) 4887 if (!CONSTANT_CLASS_P (value)) 4888 { 4889 constant_p = false; 4890 break; 4891 } 4892 4893 if (constant_p) 4894 { 4895 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts); 4896 break; 4897 } 4898 4899 TREE_CONSTANT (ctor) = 0; 4900 } 4901 4902 /* Vector types use CONSTRUCTOR all the way through gimple 4903 compilation as a general initializer. */ 4904 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce) 4905 { 4906 enum gimplify_status tret; 4907 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val, 4908 fb_rvalue); 4909 if (tret == GS_ERROR) 4910 ret = GS_ERROR; 4911 else if (TREE_STATIC (ctor) 4912 && !initializer_constant_valid_p (ce->value, 4913 TREE_TYPE (ce->value))) 4914 TREE_STATIC (ctor) = 0; 4915 } 4916 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0))) 4917 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p); 4918 } 4919 break; 4920 4921 default: 4922 /* So how did we get a CONSTRUCTOR for a scalar type? */ 4923 gcc_unreachable (); 4924 } 4925 4926 if (ret == GS_ERROR) 4927 return GS_ERROR; 4928 /* If we have gimplified both sides of the initializer but have 4929 not emitted an assignment, do so now. */ 4930 if (*expr_p) 4931 { 4932 tree lhs = TREE_OPERAND (*expr_p, 0); 4933 tree rhs = TREE_OPERAND (*expr_p, 1); 4934 if (want_value && object == lhs) 4935 lhs = unshare_expr (lhs); 4936 gassign *init = gimple_build_assign (lhs, rhs); 4937 gimplify_seq_add_stmt (pre_p, init); 4938 } 4939 if (want_value) 4940 { 4941 *expr_p = object; 4942 return GS_OK; 4943 } 4944 else 4945 { 4946 *expr_p = NULL; 4947 return GS_ALL_DONE; 4948 } 4949 } 4950 4951 /* Given a pointer value OP0, return a simplified version of an 4952 indirection through OP0, or NULL_TREE if no simplification is 4953 possible. This may only be applied to a rhs of an expression. 4954 Note that the resulting type may be different from the type pointed 4955 to in the sense that it is still compatible from the langhooks 4956 point of view. */ 4957 4958 static tree 4959 gimple_fold_indirect_ref_rhs (tree t) 4960 { 4961 return gimple_fold_indirect_ref (t); 4962 } 4963 4964 /* Subroutine of gimplify_modify_expr to do simplifications of 4965 MODIFY_EXPRs based on the code of the RHS. We loop for as long as 4966 something changes. */ 4967 4968 static enum gimplify_status 4969 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p, 4970 gimple_seq *pre_p, gimple_seq *post_p, 4971 bool want_value) 4972 { 4973 enum gimplify_status ret = GS_UNHANDLED; 4974 bool changed; 4975 4976 do 4977 { 4978 changed = false; 4979 switch (TREE_CODE (*from_p)) 4980 { 4981 case VAR_DECL: 4982 /* If we're assigning from a read-only variable initialized with 4983 a constructor, do the direct assignment from the constructor, 4984 but only if neither source nor target are volatile since this 4985 latter assignment might end up being done on a per-field basis. */ 4986 if (DECL_INITIAL (*from_p) 4987 && TREE_READONLY (*from_p) 4988 && !TREE_THIS_VOLATILE (*from_p) 4989 && !TREE_THIS_VOLATILE (*to_p) 4990 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR) 4991 { 4992 tree old_from = *from_p; 4993 enum gimplify_status subret; 4994 4995 /* Move the constructor into the RHS. 
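Sketch of the substitution (names invented):

  static const struct S cst = { 1, 2 };
  ... x = cst; ...   ==>   x = { 1, 2 };

unless gimplify_init_constructor reports that the constructor would
need a temporary, in which case we revert below.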
*/ 4996 *from_p = unshare_expr (DECL_INITIAL (*from_p)); 4997 4998 /* Let's see if gimplify_init_constructor will need to put 4999 it in memory. */ 5000 subret = gimplify_init_constructor (expr_p, NULL, NULL, 5001 false, true); 5002 if (subret == GS_ERROR) 5003 { 5004 /* If so, revert the change. */ 5005 *from_p = old_from; 5006 } 5007 else 5008 { 5009 ret = GS_OK; 5010 changed = true; 5011 } 5012 } 5013 break; 5014 case INDIRECT_REF: 5015 { 5016 /* If we have code like 5017 5018 *(const A*)(A*)&x 5019 5020 where the type of "x" is a (possibly cv-qualified variant 5021 of "A"), treat the entire expression as identical to "x". 5022 This kind of code arises in C++ when an object is bound 5023 to a const reference, and if "x" is a TARGET_EXPR we want 5024 to take advantage of the optimization below. */ 5025 bool volatile_p = TREE_THIS_VOLATILE (*from_p); 5026 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0)); 5027 if (t) 5028 { 5029 if (TREE_THIS_VOLATILE (t) != volatile_p) 5030 { 5031 if (DECL_P (t)) 5032 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p), 5033 build_fold_addr_expr (t)); 5034 if (REFERENCE_CLASS_P (t)) 5035 TREE_THIS_VOLATILE (t) = volatile_p; 5036 } 5037 *from_p = t; 5038 ret = GS_OK; 5039 changed = true; 5040 } 5041 break; 5042 } 5043 5044 case TARGET_EXPR: 5045 { 5046 /* If we are initializing something from a TARGET_EXPR, strip the 5047 TARGET_EXPR and initialize it directly, if possible. This can't 5048 be done if the initializer is void, since that implies that the 5049 temporary is set in some non-trivial way. 5050 5051 ??? What about code that pulls out the temp and uses it 5052 elsewhere? I think that such code never uses the TARGET_EXPR as 5053 an initializer. If I'm wrong, we'll die because the temp won't 5054 have any RTL. In that case, I guess we'll need to replace 5055 references somehow. */ 5056 tree init = TARGET_EXPR_INITIAL (*from_p); 5057 5058 if (init 5059 && !VOID_TYPE_P (TREE_TYPE (init))) 5060 { 5061 *from_p = init; 5062 ret = GS_OK; 5063 changed = true; 5064 } 5065 } 5066 break; 5067 5068 case COMPOUND_EXPR: 5069 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be 5070 caught. */ 5071 gimplify_compound_expr (from_p, pre_p, true); 5072 ret = GS_OK; 5073 changed = true; 5074 break; 5075 5076 case CONSTRUCTOR: 5077 /* If we already made some changes, let the front end have a 5078 crack at this before we break it down. */ 5079 if (ret != GS_UNHANDLED) 5080 break; 5081 /* If we're initializing from a CONSTRUCTOR, break this into 5082 individual MODIFY_EXPRs. */ 5083 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value, 5084 false); 5085 5086 case COND_EXPR: 5087 /* If we're assigning to a non-register type, push the assignment 5088 down into the branches. This is mandatory for ADDRESSABLE types, 5089 since we cannot generate temporaries for such, but it saves a 5090 copy in other cases as well. */ 5091 if (!is_gimple_reg_type (TREE_TYPE (*from_p))) 5092 { 5093 /* This code should mirror the code in gimplify_cond_expr. */ 5094 enum tree_code code = TREE_CODE (*expr_p); 5095 tree cond = *from_p; 5096 tree result = *to_p; 5097 5098 ret = gimplify_expr (&result, pre_p, post_p, 5099 is_gimple_lvalue, fb_lvalue); 5100 if (ret != GS_ERROR) 5101 ret = GS_OK; 5102 5103 /* If we are going to write RESULT more than once, clear 5104 TREE_READONLY flag, otherwise we might incorrectly promote 5105 the variable to static const and initialize it at compile 5106 time in one of the branches. 
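Concretely (a sketch): for 'struct S r = c ? a : b;' we build

  if (c) r = a; else r = b;

so 'r' is stored to on both paths and must not look like a constant.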
*/ 5107 if (VAR_P (result) 5108 && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node 5109 && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node) 5110 TREE_READONLY (result) = 0; 5111 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node) 5112 TREE_OPERAND (cond, 1) 5113 = build2 (code, void_type_node, result, 5114 TREE_OPERAND (cond, 1)); 5115 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node) 5116 TREE_OPERAND (cond, 2) 5117 = build2 (code, void_type_node, unshare_expr (result), 5118 TREE_OPERAND (cond, 2)); 5119 5120 TREE_TYPE (cond) = void_type_node; 5121 recalculate_side_effects (cond); 5122 5123 if (want_value) 5124 { 5125 gimplify_and_add (cond, pre_p); 5126 *expr_p = unshare_expr (result); 5127 } 5128 else 5129 *expr_p = cond; 5130 return ret; 5131 } 5132 break; 5133 5134 case CALL_EXPR: 5135 /* For calls that return in memory, give *to_p as the CALL_EXPR's 5136 return slot so that we don't generate a temporary. */ 5137 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p) 5138 && aggregate_value_p (*from_p, *from_p)) 5139 { 5140 bool use_target; 5141 5142 if (!(rhs_predicate_for (*to_p))(*from_p)) 5143 /* If we need a temporary, *to_p isn't accurate. */ 5144 use_target = false; 5145 /* It's OK to use the return slot directly unless it's an NRV. */ 5146 else if (TREE_CODE (*to_p) == RESULT_DECL 5147 && DECL_NAME (*to_p) == NULL_TREE 5148 && needs_to_live_in_memory (*to_p)) 5149 use_target = true; 5150 else if (is_gimple_reg_type (TREE_TYPE (*to_p)) 5151 || (DECL_P (*to_p) && DECL_REGISTER (*to_p))) 5152 /* Don't force regs into memory. */ 5153 use_target = false; 5154 else if (TREE_CODE (*expr_p) == INIT_EXPR) 5155 /* It's OK to use the target directly if it's being 5156 initialized. */ 5157 use_target = true; 5158 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p))) 5159 != INTEGER_CST) 5160 /* Always use the target and thus RSO for variable-sized types. 5161 GIMPLE cannot deal with a variable-sized assignment 5162 embedded in a call statement. */ 5163 use_target = true; 5164 else if (TREE_CODE (*to_p) != SSA_NAME 5165 && (!is_gimple_variable (*to_p) 5166 || needs_to_live_in_memory (*to_p))) 5167 /* Don't use the original target if it's already addressable; 5168 if its address escapes, and the called function uses the 5169 NRV optimization, a conforming program could see *to_p 5170 change before the called function returns; see c++/19317. 5171 When optimizing, the return_slot pass marks more functions 5172 as safe after we have escape info. */ 5173 use_target = false; 5174 else 5175 use_target = true; 5176 5177 if (use_target) 5178 { 5179 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1; 5180 mark_addressable (*to_p); 5181 } 5182 } 5183 break; 5184 5185 case WITH_SIZE_EXPR: 5186 /* Likewise for calls that return an aggregate of non-constant size, 5187 since we would not be able to generate a temporary at all. */ 5188 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR) 5189 { 5190 *from_p = TREE_OPERAND (*from_p, 0); 5191 /* We don't change ret in this case because the 5192 WITH_SIZE_EXPR might have been added in 5193 gimplify_modify_expr, so returning GS_OK would lead to an 5194 infinite loop. */ 5195 changed = true; 5196 } 5197 break; 5198 5199 /* If we're initializing from a container, push the initialization 5200 inside it. 
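E.g. for a GNU statement expression (a sketch):

  x = ({ s1; s2; val; });   ==>   ({ s1; s2; x = val; });

voidify_wrapper_expr below performs the actual rewriting.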
*/ 5201 case CLEANUP_POINT_EXPR: 5202 case BIND_EXPR: 5203 case STATEMENT_LIST: 5204 { 5205 tree wrap = *from_p; 5206 tree t; 5207 5208 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval, 5209 fb_lvalue); 5210 if (ret != GS_ERROR) 5211 ret = GS_OK; 5212 5213 t = voidify_wrapper_expr (wrap, *expr_p); 5214 gcc_assert (t == *expr_p); 5215 5216 if (want_value) 5217 { 5218 gimplify_and_add (wrap, pre_p); 5219 *expr_p = unshare_expr (*to_p); 5220 } 5221 else 5222 *expr_p = wrap; 5223 return GS_OK; 5224 } 5225 5226 case COMPOUND_LITERAL_EXPR: 5227 { 5228 tree complit = TREE_OPERAND (*expr_p, 1); 5229 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit); 5230 tree decl = DECL_EXPR_DECL (decl_s); 5231 tree init = DECL_INITIAL (decl); 5232 5233 /* struct T x = (struct T) { 0, 1, 2 } can be optimized 5234 into struct T x = { 0, 1, 2 } if the address of the 5235 compound literal has never been taken. */ 5236 if (!TREE_ADDRESSABLE (complit) 5237 && !TREE_ADDRESSABLE (decl) 5238 && init) 5239 { 5240 *expr_p = copy_node (*expr_p); 5241 TREE_OPERAND (*expr_p, 1) = init; 5242 return GS_OK; 5243 } 5244 } 5245 5246 default: 5247 break; 5248 } 5249 } 5250 while (changed); 5251 5252 return ret; 5253 } 5254 5255 5256 /* Return true if T looks like a valid GIMPLE statement. */ 5257 5258 static bool 5259 is_gimple_stmt (tree t) 5260 { 5261 const enum tree_code code = TREE_CODE (t); 5262 5263 switch (code) 5264 { 5265 case NOP_EXPR: 5266 /* The only valid NOP_EXPR is the empty statement. */ 5267 return IS_EMPTY_STMT (t); 5268 5269 case BIND_EXPR: 5270 case COND_EXPR: 5271 /* These are only valid if they're void. */ 5272 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t)); 5273 5274 case SWITCH_EXPR: 5275 case GOTO_EXPR: 5276 case RETURN_EXPR: 5277 case LABEL_EXPR: 5278 case CASE_LABEL_EXPR: 5279 case TRY_CATCH_EXPR: 5280 case TRY_FINALLY_EXPR: 5281 case EH_FILTER_EXPR: 5282 case CATCH_EXPR: 5283 case ASM_EXPR: 5284 case STATEMENT_LIST: 5285 case OACC_PARALLEL: 5286 case OACC_KERNELS: 5287 case OACC_DATA: 5288 case OACC_HOST_DATA: 5289 case OACC_DECLARE: 5290 case OACC_UPDATE: 5291 case OACC_ENTER_DATA: 5292 case OACC_EXIT_DATA: 5293 case OACC_CACHE: 5294 case OMP_PARALLEL: 5295 case OMP_FOR: 5296 case OMP_SIMD: 5297 case CILK_SIMD: 5298 case OMP_DISTRIBUTE: 5299 case OACC_LOOP: 5300 case OMP_SECTIONS: 5301 case OMP_SECTION: 5302 case OMP_SINGLE: 5303 case OMP_MASTER: 5304 case OMP_TASKGROUP: 5305 case OMP_ORDERED: 5306 case OMP_CRITICAL: 5307 case OMP_TASK: 5308 case OMP_TARGET: 5309 case OMP_TARGET_DATA: 5310 case OMP_TARGET_UPDATE: 5311 case OMP_TARGET_ENTER_DATA: 5312 case OMP_TARGET_EXIT_DATA: 5313 case OMP_TASKLOOP: 5314 case OMP_TEAMS: 5315 /* These are always void. */ 5316 return true; 5317 5318 case CALL_EXPR: 5319 case MODIFY_EXPR: 5320 case PREDICT_EXPR: 5321 /* These are valid regardless of their type. */ 5322 return true; 5323 5324 default: 5325 return false; 5326 } 5327 } 5328 5329 5330 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is 5331 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with 5332 DECL_GIMPLE_REG_P set. 5333 5334 IMPORTANT NOTE: This promotion is performed by introducing a load of the 5335 other, unmodified part of the complex object just before the total store. 5336 As a consequence, if the object is still uninitialized, an undefined value 5337 will be loaded into a register, which may result in a spurious exception 5338 if the register is floating-point and the value happens to be a signaling 5339 NaN for example. 
Then the fully-fledged complex operations lowering pass, 5340 followed by a DCE pass, is necessary in order to fix things up. */ 5341 5342 static enum gimplify_status 5343 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p, 5344 bool want_value) 5345 { 5346 enum tree_code code, ocode; 5347 tree lhs, rhs, new_rhs, other, realpart, imagpart; 5348 5349 lhs = TREE_OPERAND (*expr_p, 0); 5350 rhs = TREE_OPERAND (*expr_p, 1); 5351 code = TREE_CODE (lhs); 5352 lhs = TREE_OPERAND (lhs, 0); 5353 5354 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR; 5355 other = build1 (ocode, TREE_TYPE (rhs), lhs); 5356 TREE_NO_WARNING (other) = 1; 5357 other = get_formal_tmp_var (other, pre_p); 5358 5359 realpart = code == REALPART_EXPR ? rhs : other; 5360 imagpart = code == REALPART_EXPR ? other : rhs; 5361 5362 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart)) 5363 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart); 5364 else 5365 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart); 5366 5367 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs)); 5368 *expr_p = (want_value) ? rhs : NULL_TREE; 5369 5370 return GS_ALL_DONE; 5371 } 5372 5373 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P. 5374 5375 modify_expr 5376 : varname '=' rhs 5377 | '*' ID '=' rhs 5378 5379 PRE_P points to the list where side effects that must happen before 5380 *EXPR_P should be stored. 5381 5382 POST_P points to the list where side effects that must happen after 5383 *EXPR_P should be stored. 5384 5385 WANT_VALUE is nonzero iff we want to use the value of this expression 5386 in another expression. */ 5387 5388 static enum gimplify_status 5389 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p, 5390 bool want_value) 5391 { 5392 tree *from_p = &TREE_OPERAND (*expr_p, 1); 5393 tree *to_p = &TREE_OPERAND (*expr_p, 0); 5394 enum gimplify_status ret = GS_UNHANDLED; 5395 gimple *assign; 5396 location_t loc = EXPR_LOCATION (*expr_p); 5397 gimple_stmt_iterator gsi; 5398 5399 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR 5400 || TREE_CODE (*expr_p) == INIT_EXPR); 5401 5402 /* Trying to simplify a clobber using normal logic doesn't work, 5403 so handle it here. */ 5404 if (TREE_CLOBBER_P (*from_p)) 5405 { 5406 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue); 5407 if (ret == GS_ERROR) 5408 return ret; 5409 gcc_assert (!want_value 5410 && (VAR_P (*to_p) || TREE_CODE (*to_p) == MEM_REF)); 5411 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p)); 5412 *expr_p = NULL; 5413 return GS_ALL_DONE; 5414 } 5415 5416 /* Insert pointer conversions required by the middle-end that are not 5417 required by the frontend. This fixes middle-end type checking for 5418 test cases such as gcc.dg/redecl-6.c. */ 5419 if (POINTER_TYPE_P (TREE_TYPE (*to_p))) 5420 { 5421 STRIP_USELESS_TYPE_CONVERSION (*from_p); 5422 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p))) 5423 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p); 5424 } 5425 5426 /* See if any simplifications can be done based on what the RHS is. */ 5427 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p, 5428 want_value); 5429 if (ret != GS_UNHANDLED) 5430 return ret; 5431 5432 /* For zero-sized types, only gimplify the left hand side and right hand 5433 side as statements and throw away the assignment. Do this after 5434 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable 5435 types properly.
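E.g. (a sketch) with 'struct empty {} a, b;', the copy 'a = b;'
evaluates both operands for their side effects and emits no store.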
*/ 5436 if (zero_sized_type (TREE_TYPE (*from_p)) 5437 && !want_value 5438 /* Don't do this for calls that return addressable types, expand_call 5439 relies on those having a lhs. */ 5440 && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p)) 5441 && TREE_CODE (*from_p) == CALL_EXPR)) 5442 { 5443 gimplify_stmt (from_p, pre_p); 5444 gimplify_stmt (to_p, pre_p); 5445 *expr_p = NULL_TREE; 5446 return GS_ALL_DONE; 5447 } 5448 5449 /* If the value being copied is of variable width, compute the length 5450 of the copy into a WITH_SIZE_EXPR. Note that we need to do this 5451 before gimplifying any of the operands so that we can resolve any 5452 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses 5453 the size of the expression to be copied, not of the destination, so 5454 that is what we must do here. */ 5455 maybe_with_size_expr (from_p); 5456 5457 /* As a special case, we have to temporarily allow for assignments 5458 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is 5459 a toplevel statement, when gimplifying the GENERIC expression 5460 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple 5461 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>. 5462 5463 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To 5464 prevent gimplify_expr from trying to create a new temporary for 5465 foo's LHS, we tell it that it should only gimplify until it 5466 reaches the CALL_EXPR. On return from gimplify_expr, the newly 5467 created GIMPLE_CALL <foo> will be the last statement in *PRE_P 5468 and all we need to do here is set 'a' to be its LHS. */ 5469 5470 /* Gimplify the RHS first for C++17 and bug 71104. */ 5471 gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p); 5472 ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue); 5473 if (ret == GS_ERROR) 5474 return ret; 5475 5476 /* Then gimplify the LHS. */ 5477 /* If we gimplified the RHS to a CALL_EXPR and that call may return 5478 twice we have to make sure to gimplify into non-SSA as otherwise 5479 the abnormal edge added later will make those defs not dominate 5480 their uses. 5481 ??? Technically this applies only to the registers used in the 5482 resulting non-register *TO_P. */ 5483 bool saved_into_ssa = gimplify_ctxp->into_ssa; 5484 if (saved_into_ssa 5485 && TREE_CODE (*from_p) == CALL_EXPR 5486 && call_expr_flags (*from_p) & ECF_RETURNS_TWICE) 5487 gimplify_ctxp->into_ssa = false; 5488 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue); 5489 gimplify_ctxp->into_ssa = saved_into_ssa; 5490 if (ret == GS_ERROR) 5491 return ret; 5492 5493 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial 5494 guess for the predicate was wrong. */ 5495 gimple_predicate final_pred = rhs_predicate_for (*to_p); 5496 if (final_pred != initial_pred) 5497 { 5498 ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue); 5499 if (ret == GS_ERROR) 5500 return ret; 5501 } 5502 5503 /* In case of a va_arg internal fn wrapped in a WITH_SIZE_EXPR, add the type 5504 size as argument to the call.
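That is, roughly, 'WITH_SIZE_EXPR <IFN_VA_ARG (ap, tag, aptag), size>' is rebuilt below as 'IFN_VA_ARG (ap, tag, aptag, size)', so the size expression survives into the call.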
*/ 5505 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR) 5506 { 5507 tree call = TREE_OPERAND (*from_p, 0); 5508 tree vlasize = TREE_OPERAND (*from_p, 1); 5509 5510 if (TREE_CODE (call) == CALL_EXPR 5511 && CALL_EXPR_IFN (call) == IFN_VA_ARG) 5512 { 5513 int nargs = call_expr_nargs (call); 5514 tree type = TREE_TYPE (call); 5515 tree ap = CALL_EXPR_ARG (call, 0); 5516 tree tag = CALL_EXPR_ARG (call, 1); 5517 tree aptag = CALL_EXPR_ARG (call, 2); 5518 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call), 5519 IFN_VA_ARG, type, 5520 nargs + 1, ap, tag, 5521 aptag, vlasize); 5522 TREE_OPERAND (*from_p, 0) = newcall; 5523 } 5524 } 5525 5526 /* Now see if the above changed *from_p to something we handle specially. */ 5527 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p, 5528 want_value); 5529 if (ret != GS_UNHANDLED) 5530 return ret; 5531 5532 /* If we've got a variable sized assignment between two lvalues (i.e. does 5533 not involve a call), then we can make things a bit more straightforward 5534 by converting the assignment to memcpy or memset. */ 5535 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR) 5536 { 5537 tree from = TREE_OPERAND (*from_p, 0); 5538 tree size = TREE_OPERAND (*from_p, 1); 5539 5540 if (TREE_CODE (from) == CONSTRUCTOR) 5541 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p); 5542 5543 if (is_gimple_addressable (from)) 5544 { 5545 *from_p = from; 5546 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value, 5547 pre_p); 5548 } 5549 } 5550 5551 /* Transform partial stores to non-addressable complex variables into 5552 total stores. This allows us to use real instead of virtual operands 5553 for these variables, which improves optimization. */ 5554 if ((TREE_CODE (*to_p) == REALPART_EXPR 5555 || TREE_CODE (*to_p) == IMAGPART_EXPR) 5556 && is_gimple_reg (TREE_OPERAND (*to_p, 0))) 5557 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value); 5558 5559 /* Try to alleviate the effects of the gimplification creating artificial 5560 temporaries (see for example is_gimple_reg_rhs) on the debug info, but 5561 make sure not to create DECL_DEBUG_EXPR links across functions. */ 5562 if (!gimplify_ctxp->into_ssa 5563 && VAR_P (*from_p) 5564 && DECL_IGNORED_P (*from_p) 5565 && DECL_P (*to_p) 5566 && !DECL_IGNORED_P (*to_p) 5567 && decl_function_context (*to_p) == current_function_decl) 5568 { 5569 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p)) 5570 DECL_NAME (*from_p) 5571 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p))); 5572 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1; 5573 SET_DECL_DEBUG_EXPR (*from_p, *to_p); 5574 } 5575 5576 if (want_value && TREE_THIS_VOLATILE (*to_p)) 5577 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p); 5578 5579 if (TREE_CODE (*from_p) == CALL_EXPR) 5580 { 5581 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL 5582 instead of a GIMPLE_ASSIGN. */ 5583 gcall *call_stmt; 5584 if (CALL_EXPR_FN (*from_p) == NULL_TREE) 5585 { 5586 /* Gimplify internal functions created in the FEs. 
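For instance, with -fsanitize=signed-integer-overflow the C-family FEs emit calls such as 'IFN_UBSAN_CHECK_ADD (a, b)' that have no CALL_EXPR_FN; each argument is gimplified and the assignment becomes one internal GIMPLE_CALL with *TO_P as its lhs.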
*/ 5587 int nargs = call_expr_nargs (*from_p), i; 5588 enum internal_fn ifn = CALL_EXPR_IFN (*from_p); 5589 auto_vec<tree> vargs (nargs); 5590 5591 for (i = 0; i < nargs; i++) 5592 { 5593 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p, 5594 EXPR_LOCATION (*from_p)); 5595 vargs.quick_push (CALL_EXPR_ARG (*from_p, i)); 5596 } 5597 call_stmt = gimple_build_call_internal_vec (ifn, vargs); 5598 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p)); 5599 } 5600 else 5601 { 5602 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p)); 5603 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0); 5604 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p)); 5605 tree fndecl = get_callee_fndecl (*from_p); 5606 if (fndecl 5607 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL 5608 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT 5609 && call_expr_nargs (*from_p) == 3) 5610 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3, 5611 CALL_EXPR_ARG (*from_p, 0), 5612 CALL_EXPR_ARG (*from_p, 1), 5613 CALL_EXPR_ARG (*from_p, 2)); 5614 else 5615 { 5616 call_stmt = gimple_build_call_from_tree (*from_p); 5617 gimple_call_set_fntype (call_stmt, TREE_TYPE (fnptrtype)); 5618 } 5619 } 5620 notice_special_calls (call_stmt); 5621 if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p)) 5622 gimple_call_set_lhs (call_stmt, *to_p); 5623 else if (TREE_CODE (*to_p) == SSA_NAME) 5624 /* The above is somewhat premature, avoid ICEing later for a 5625 SSA name w/o a definition. We may have uses in the GIMPLE IL. 5626 ??? This doesn't make it a default-def. */ 5627 SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop (); 5628 assign = call_stmt; 5629 } 5630 else 5631 { 5632 assign = gimple_build_assign (*to_p, *from_p); 5633 gimple_set_location (assign, EXPR_LOCATION (*expr_p)); 5634 if (COMPARISON_CLASS_P (*from_p)) 5635 gimple_set_no_warning (assign, TREE_NO_WARNING (*from_p)); 5636 } 5637 5638 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p)) 5639 { 5640 /* We should have got an SSA name from the start. */ 5641 gcc_assert (TREE_CODE (*to_p) == SSA_NAME 5642 || ! gimple_in_ssa_p (cfun)); 5643 } 5644 5645 gimplify_seq_add_stmt (pre_p, assign); 5646 gsi = gsi_last (*pre_p); 5647 maybe_fold_stmt (&gsi); 5648 5649 if (want_value) 5650 { 5651 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p); 5652 return GS_OK; 5653 } 5654 else 5655 *expr_p = NULL; 5656 5657 return GS_ALL_DONE; 5658 } 5659 5660 /* Gimplify a comparison between two variable-sized objects. Do this 5661 with a call to BUILT_IN_MEMCMP. */ 5662 5663 static enum gimplify_status 5664 gimplify_variable_sized_compare (tree *expr_p) 5665 { 5666 location_t loc = EXPR_LOCATION (*expr_p); 5667 tree op0 = TREE_OPERAND (*expr_p, 0); 5668 tree op1 = TREE_OPERAND (*expr_p, 1); 5669 tree t, arg, dest, src, expr; 5670 5671 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0)); 5672 arg = unshare_expr (arg); 5673 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0); 5674 src = build_fold_addr_expr_loc (loc, op1); 5675 dest = build_fold_addr_expr_loc (loc, op0); 5676 t = builtin_decl_implicit (BUILT_IN_MEMCMP); 5677 t = build_call_expr_loc (loc, t, 3, dest, src, arg); 5678 5679 expr 5680 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node); 5681 SET_EXPR_LOCATION (expr, loc); 5682 *expr_p = expr; 5683 5684 return GS_OK; 5685 } 5686 5687 /* Gimplify a comparison between two aggregate objects of integral scalar 5688 mode as a comparison between the bitwise equivalent scalar values. 
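For instance, on a target where 'struct S { short a; short b; }' has SImode, 's1 == s2' becomes a comparison of two 32-bit VIEW_CONVERT_EXPRs — a sketch; the actual scalar type is whatever type_for_mode returns below.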
*/ 5689 5690 static enum gimplify_status 5691 gimplify_scalar_mode_aggregate_compare (tree *expr_p) 5692 { 5693 location_t loc = EXPR_LOCATION (*expr_p); 5694 tree op0 = TREE_OPERAND (*expr_p, 0); 5695 tree op1 = TREE_OPERAND (*expr_p, 1); 5696 5697 tree type = TREE_TYPE (op0); 5698 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1); 5699 5700 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0); 5701 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1); 5702 5703 *expr_p 5704 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1); 5705 5706 return GS_OK; 5707 } 5708 5709 /* Gimplify an expression sequence. This function gimplifies each 5710 expression and rewrites the original expression with the last 5711 expression of the sequence in GIMPLE form. 5712 5713 PRE_P points to the list where the side effects for all the 5714 expressions in the sequence will be emitted. 5715 5716 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */ 5717 5718 static enum gimplify_status 5719 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value) 5720 { 5721 tree t = *expr_p; 5722 5723 do 5724 { 5725 tree *sub_p = &TREE_OPERAND (t, 0); 5726 5727 if (TREE_CODE (*sub_p) == COMPOUND_EXPR) 5728 gimplify_compound_expr (sub_p, pre_p, false); 5729 else 5730 gimplify_stmt (sub_p, pre_p); 5731 5732 t = TREE_OPERAND (t, 1); 5733 } 5734 while (TREE_CODE (t) == COMPOUND_EXPR); 5735 5736 *expr_p = t; 5737 if (want_value) 5738 return GS_OK; 5739 else 5740 { 5741 gimplify_stmt (expr_p, pre_p); 5742 return GS_ALL_DONE; 5743 } 5744 } 5745 5746 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to 5747 gimplify. After gimplification, EXPR_P will point to a new temporary 5748 that holds the original value of the SAVE_EXPR node. 5749 5750 PRE_P points to the list where side effects that must happen before 5751 *EXPR_P should be stored. */ 5752 5753 static enum gimplify_status 5754 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p) 5755 { 5756 enum gimplify_status ret = GS_ALL_DONE; 5757 tree val; 5758 5759 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR); 5760 val = TREE_OPERAND (*expr_p, 0); 5761 5762 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */ 5763 if (!SAVE_EXPR_RESOLVED_P (*expr_p)) 5764 { 5765 /* The operand may be a void-valued expression such as SAVE_EXPRs 5766 generated by the Java frontend for class initialization. It is 5767 being executed only for its side-effects. */ 5768 if (TREE_TYPE (val) == void_type_node) 5769 { 5770 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p, 5771 is_gimple_stmt, fb_none); 5772 val = NULL; 5773 } 5774 else 5775 /* The temporary may not be an SSA name as later abnormal and EH 5776 control flow may invalidate use/def domination. When in SSA 5777 form then assume there are no such issues and SAVE_EXPRs only 5778 appear via GENERIC foldings. */ 5779 val = get_initialized_tmp_var (val, pre_p, post_p, 5780 gimple_in_ssa_p (cfun)); 5781 5782 TREE_OPERAND (*expr_p, 0) = val; 5783 SAVE_EXPR_RESOLVED_P (*expr_p) = 1; 5784 } 5785 5786 *expr_p = val; 5787 5788 return ret; 5789 } 5790 5791 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P 5792 5793 unary_expr 5794 : ... 5795 | '&' varname 5796 ... 5797 5798 PRE_P points to the list where side effects that must happen before 5799 *EXPR_P should be stored. 5800 5801 POST_P points to the list where side effects that must happen after 5802 *EXPR_P should be stored. 
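For instance, '&*p' collapses back to 'p' (modulo qualification conversions) and '&VIEW_CONVERT_EXPR<T>(x)' becomes roughly '(T *) &x' — sketches of the cases handled below.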
*/ 5803 5804 static enum gimplify_status 5805 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p) 5806 { 5807 tree expr = *expr_p; 5808 tree op0 = TREE_OPERAND (expr, 0); 5809 enum gimplify_status ret; 5810 location_t loc = EXPR_LOCATION (*expr_p); 5811 5812 switch (TREE_CODE (op0)) 5813 { 5814 case INDIRECT_REF: 5815 do_indirect_ref: 5816 /* Check if we are dealing with an expression of the form '&*ptr'. 5817 While the front end folds away '&*ptr' into 'ptr', these 5818 expressions may be generated internally by the compiler (e.g., 5819 builtins like __builtin_va_end). */ 5820 /* Caution: the silent array decomposition semantics we allow for 5821 ADDR_EXPR means we can't always discard the pair. */ 5822 /* Gimplification of the ADDR_EXPR operand may drop 5823 cv-qualification conversions, so make sure we add them if 5824 needed. */ 5825 { 5826 tree op00 = TREE_OPERAND (op0, 0); 5827 tree t_expr = TREE_TYPE (expr); 5828 tree t_op00 = TREE_TYPE (op00); 5829 5830 if (!useless_type_conversion_p (t_expr, t_op00)) 5831 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00); 5832 *expr_p = op00; 5833 ret = GS_OK; 5834 } 5835 break; 5836 5837 case VIEW_CONVERT_EXPR: 5838 /* Take the address of our operand and then convert it to the type of 5839 this ADDR_EXPR. 5840 5841 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at 5842 all clear. The impact of this transformation is even less clear. */ 5843 5844 /* If the operand is a useless conversion, look through it. Doing so 5845 guarantees that the ADDR_EXPR and its operand will remain of the 5846 same type. */ 5847 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0))) 5848 op0 = TREE_OPERAND (op0, 0); 5849 5850 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr), 5851 build_fold_addr_expr_loc (loc, 5852 TREE_OPERAND (op0, 0))); 5853 ret = GS_OK; 5854 break; 5855 5856 case MEM_REF: 5857 if (integer_zerop (TREE_OPERAND (op0, 1))) 5858 goto do_indirect_ref; 5859 5860 /* fall through */ 5861 5862 default: 5863 /* If we see a call to a declared builtin or see its address 5864 being taken (we can unify those cases here) then we can mark 5865 the builtin for implicit generation by GCC. */ 5866 if (TREE_CODE (op0) == FUNCTION_DECL 5867 && DECL_BUILT_IN_CLASS (op0) == BUILT_IN_NORMAL 5868 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0))) 5869 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true); 5870 5871 /* We use fb_either here because the C frontend sometimes takes 5872 the address of a call that returns a struct; see 5873 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make 5874 the implied temporary explicit. */ 5875 5876 /* Make the operand addressable. */ 5877 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p, 5878 is_gimple_addressable, fb_either); 5879 if (ret == GS_ERROR) 5880 break; 5881 5882 /* Then mark it. Beware that it may not be possible to do so directly 5883 if a temporary has been created by the gimplification. */ 5884 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p); 5885 5886 op0 = TREE_OPERAND (expr, 0); 5887 5888 /* For various reasons, the gimplification of the expression 5889 may have made a new INDIRECT_REF. */ 5890 if (TREE_CODE (op0) == INDIRECT_REF) 5891 goto do_indirect_ref; 5892 5893 mark_addressable (TREE_OPERAND (expr, 0)); 5894 5895 /* The FEs may end up building ADDR_EXPRs early on a decl with 5896 an incomplete type. Re-build ADDR_EXPRs in canonical form 5897 here. 
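E.g. the recorded pointer type may not match '&' of the operand's now-complete type; build_fold_addr_expr gives the node the canonical 'pointer to TREE_TYPE (op0)' type, and any conversion back to the original type is added just below.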
*/ 5898 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr)))) 5899 *expr_p = build_fold_addr_expr (op0); 5900 5901 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */ 5902 recompute_tree_invariant_for_addr_expr (*expr_p); 5903 5904 /* If we re-built the ADDR_EXPR add a conversion to the original type 5905 if required. */ 5906 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p))) 5907 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p); 5908 5909 break; 5910 } 5911 5912 return ret; 5913 } 5914 5915 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple 5916 value; output operands should be a gimple lvalue. */ 5917 5918 static enum gimplify_status 5919 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p) 5920 { 5921 tree expr; 5922 int noutputs; 5923 const char **oconstraints; 5924 int i; 5925 tree link; 5926 const char *constraint; 5927 bool allows_mem, allows_reg, is_inout; 5928 enum gimplify_status ret, tret; 5929 gasm *stmt; 5930 vec<tree, va_gc> *inputs; 5931 vec<tree, va_gc> *outputs; 5932 vec<tree, va_gc> *clobbers; 5933 vec<tree, va_gc> *labels; 5934 tree link_next; 5935 5936 expr = *expr_p; 5937 noutputs = list_length (ASM_OUTPUTS (expr)); 5938 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *)); 5939 5940 inputs = NULL; 5941 outputs = NULL; 5942 clobbers = NULL; 5943 labels = NULL; 5944 5945 ret = GS_ALL_DONE; 5946 link_next = NULL_TREE; 5947 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next) 5948 { 5949 bool ok; 5950 size_t constraint_len; 5951 5952 link_next = TREE_CHAIN (link); 5953 5954 oconstraints[i] 5955 = constraint 5956 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link))); 5957 constraint_len = strlen (constraint); 5958 if (constraint_len == 0) 5959 continue; 5960 5961 ok = parse_output_constraint (&constraint, i, 0, 0, 5962 &allows_mem, &allows_reg, &is_inout); 5963 if (!ok) 5964 { 5965 ret = GS_ERROR; 5966 is_inout = false; 5967 } 5968 5969 if (!allows_reg && allows_mem) 5970 mark_addressable (TREE_VALUE (link)); 5971 5972 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p, 5973 is_inout ? is_gimple_min_lval : is_gimple_lvalue, 5974 fb_lvalue | fb_mayfail); 5975 if (tret == GS_ERROR) 5976 { 5977 error ("invalid lvalue in asm output %d", i); 5978 ret = tret; 5979 } 5980 5981 /* If the constraint does not allow memory make sure we gimplify 5982 it to a register if it is not already but its base is. This 5983 happens for complex and vector components. */ 5984 if (!allows_mem) 5985 { 5986 tree op = TREE_VALUE (link); 5987 if (! is_gimple_val (op) 5988 && is_gimple_reg_type (TREE_TYPE (op)) 5989 && is_gimple_reg (get_base_address (op))) 5990 { 5991 tree tem = create_tmp_reg (TREE_TYPE (op)); 5992 tree ass; 5993 if (is_inout) 5994 { 5995 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), 5996 tem, unshare_expr (op)); 5997 gimplify_and_add (ass, pre_p); 5998 } 5999 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem); 6000 gimplify_and_add (ass, post_p); 6001 6002 TREE_VALUE (link) = tem; 6003 tret = GS_OK; 6004 } 6005 } 6006 6007 vec_safe_push (outputs, link); 6008 TREE_CHAIN (link) = NULL_TREE; 6009 6010 if (is_inout) 6011 { 6012 /* An input/output operand. To give the optimizers more 6013 flexibility, split it into separate input and output 6014 operands. */ 6015 tree input; 6016 /* Buffer big enough to format a 32-bit UINT_MAX into. */ 6017 char buf[11]; 6018 6019 /* Turn the in/out constraint into an output constraint. 
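For instance a '+r' constraint is rewritten to '=r' by overwriting its first character, and a matching numeric input constraint ('0', '1', ...) referring back to this output is built below; multi-alternative constraint strings get per-alternative treatment.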
*/ 6020 char *p = xstrdup (constraint); 6021 p[0] = '='; 6022 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p); 6023 6024 /* And add a matching input constraint. */ 6025 if (allows_reg) 6026 { 6027 sprintf (buf, "%u", i); 6028 6029 /* If there are multiple alternatives in the constraint, 6030 handle each of them individually. Those that allow register 6031 will be replaced with operand number, the others will stay 6032 unchanged. */ 6033 if (strchr (p, ',') != NULL) 6034 { 6035 size_t len = 0, buflen = strlen (buf); 6036 char *beg, *end, *str, *dst; 6037 6038 for (beg = p + 1;;) 6039 { 6040 end = strchr (beg, ','); 6041 if (end == NULL) 6042 end = strchr (beg, '\0'); 6043 if ((size_t) (end - beg) < buflen) 6044 len += buflen + 1; 6045 else 6046 len += end - beg + 1; 6047 if (*end) 6048 beg = end + 1; 6049 else 6050 break; 6051 } 6052 6053 str = (char *) alloca (len); 6054 for (beg = p + 1, dst = str;;) 6055 { 6056 const char *tem; 6057 bool mem_p, reg_p, inout_p; 6058 6059 end = strchr (beg, ','); 6060 if (end) 6061 *end = '\0'; 6062 beg[-1] = '='; 6063 tem = beg - 1; 6064 parse_output_constraint (&tem, i, 0, 0, 6065 &mem_p, ®_p, &inout_p); 6066 if (dst != str) 6067 *dst++ = ','; 6068 if (reg_p) 6069 { 6070 memcpy (dst, buf, buflen); 6071 dst += buflen; 6072 } 6073 else 6074 { 6075 if (end) 6076 len = end - beg; 6077 else 6078 len = strlen (beg); 6079 memcpy (dst, beg, len); 6080 dst += len; 6081 } 6082 if (end) 6083 beg = end + 1; 6084 else 6085 break; 6086 } 6087 *dst = '\0'; 6088 input = build_string (dst - str, str); 6089 } 6090 else 6091 input = build_string (strlen (buf), buf); 6092 } 6093 else 6094 input = build_string (constraint_len - 1, constraint + 1); 6095 6096 free (p); 6097 6098 input = build_tree_list (build_tree_list (NULL_TREE, input), 6099 unshare_expr (TREE_VALUE (link))); 6100 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input); 6101 } 6102 } 6103 6104 link_next = NULL_TREE; 6105 for (link = ASM_INPUTS (expr); link; ++i, link = link_next) 6106 { 6107 link_next = TREE_CHAIN (link); 6108 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link))); 6109 parse_input_constraint (&constraint, 0, 0, noutputs, 0, 6110 oconstraints, &allows_mem, &allows_reg); 6111 6112 /* If we can't make copies, we can only accept memory. */ 6113 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link)))) 6114 { 6115 if (allows_mem) 6116 allows_reg = 0; 6117 else 6118 { 6119 error ("impossible constraint in %<asm%>"); 6120 error ("non-memory input %d must stay in memory", i); 6121 return GS_ERROR; 6122 } 6123 } 6124 6125 /* If the operand is a memory input, it should be an lvalue. */ 6126 if (!allows_reg && allows_mem) 6127 { 6128 tree inputv = TREE_VALUE (link); 6129 STRIP_NOPS (inputv); 6130 if (TREE_CODE (inputv) == PREDECREMENT_EXPR 6131 || TREE_CODE (inputv) == PREINCREMENT_EXPR 6132 || TREE_CODE (inputv) == POSTDECREMENT_EXPR 6133 || TREE_CODE (inputv) == POSTINCREMENT_EXPR 6134 || TREE_CODE (inputv) == MODIFY_EXPR) 6135 TREE_VALUE (link) = error_mark_node; 6136 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p, 6137 is_gimple_lvalue, fb_lvalue | fb_mayfail); 6138 if (tret != GS_ERROR) 6139 { 6140 /* Unlike output operands, memory inputs are not guaranteed 6141 to be lvalues by the FE, and while the expressions are 6142 marked addressable there, if it is e.g. a statement 6143 expression, temporaries in it might not end up being 6144 addressable. They might be already used in the IL and thus 6145 it is too late to make them addressable now though. 
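For instance, the temporary created for a statement-expression operand under an 'm' constraint may already be a gimple register at this point; the code below detects that, warns, and copies the value into an addressable temporary via prepare_gimple_addressable.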
*/ 6146 tree x = TREE_VALUE (link); 6147 while (handled_component_p (x)) 6148 x = TREE_OPERAND (x, 0); 6149 if (TREE_CODE (x) == MEM_REF 6150 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR) 6151 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0); 6152 if ((VAR_P (x) 6153 || TREE_CODE (x) == PARM_DECL 6154 || TREE_CODE (x) == RESULT_DECL) 6155 && !TREE_ADDRESSABLE (x) 6156 && is_gimple_reg (x)) 6157 { 6158 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), 6159 input_location), 0, 6160 "memory input %d is not directly addressable", 6161 i); 6162 prepare_gimple_addressable (&TREE_VALUE (link), pre_p); 6163 } 6164 } 6165 mark_addressable (TREE_VALUE (link)); 6166 if (tret == GS_ERROR) 6167 { 6168 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location), 6169 "memory input %d is not directly addressable", i); 6170 ret = tret; 6171 } 6172 } 6173 else 6174 { 6175 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p, 6176 is_gimple_asm_val, fb_rvalue); 6177 if (tret == GS_ERROR) 6178 ret = tret; 6179 } 6180 6181 TREE_CHAIN (link) = NULL_TREE; 6182 vec_safe_push (inputs, link); 6183 } 6184 6185 link_next = NULL_TREE; 6186 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next) 6187 { 6188 link_next = TREE_CHAIN (link); 6189 TREE_CHAIN (link) = NULL_TREE; 6190 vec_safe_push (clobbers, link); 6191 } 6192 6193 link_next = NULL_TREE; 6194 for (link = ASM_LABELS (expr); link; ++i, link = link_next) 6195 { 6196 link_next = TREE_CHAIN (link); 6197 TREE_CHAIN (link) = NULL_TREE; 6198 vec_safe_push (labels, link); 6199 } 6200 6201 /* Do not add ASMs with errors to the gimple IL stream. */ 6202 if (ret != GS_ERROR) 6203 { 6204 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)), 6205 inputs, outputs, clobbers, labels); 6206 6207 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0); 6208 gimple_asm_set_input (stmt, ASM_INPUT_P (expr)); 6209 6210 gimplify_seq_add_stmt (pre_p, stmt); 6211 } 6212 6213 return ret; 6214 } 6215 6216 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding 6217 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while 6218 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we 6219 return to this function. 6220 6221 FIXME should we complexify the prequeue handling instead? Or use flags 6222 for all the cleanups and let the optimizer tighten them up? The current 6223 code seems pretty fragile; it will break on a cleanup within any 6224 non-conditional nesting. But any such nesting would be broken, anyway; 6225 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct 6226 and continues out of it. We can do that at the RTL level, though, so 6227 having an optimizer to tighten up try/finally regions would be a Good 6228 Thing. */ 6229 6230 static enum gimplify_status 6231 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p) 6232 { 6233 gimple_stmt_iterator iter; 6234 gimple_seq body_sequence = NULL; 6235 6236 tree temp = voidify_wrapper_expr (*expr_p, NULL); 6237 6238 /* We only care about the number of conditions between the innermost 6239 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and 6240 any cleanups collected outside the CLEANUP_POINT_EXPR. 
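Roughly, a gimplified body 'tmp = f (); WCE <<cleanup>>; use (tmp);' is rewritten below as 'tmp = f (); try { use (tmp); } finally { cleanup; }' (a TRY_CATCH instead for EH-only cleanups), so each cleanup protects exactly the statements following it.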
*/ 6241 int old_conds = gimplify_ctxp->conditions; 6242 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups; 6243 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr; 6244 gimplify_ctxp->conditions = 0; 6245 gimplify_ctxp->conditional_cleanups = NULL; 6246 gimplify_ctxp->in_cleanup_point_expr = true; 6247 6248 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence); 6249 6250 gimplify_ctxp->conditions = old_conds; 6251 gimplify_ctxp->conditional_cleanups = old_cleanups; 6252 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr; 6253 6254 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); ) 6255 { 6256 gimple *wce = gsi_stmt (iter); 6257 6258 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR) 6259 { 6260 if (gsi_one_before_end_p (iter)) 6261 { 6262 /* Note that gsi_insert_seq_before and gsi_remove do not 6263 scan operands, unlike some other sequence mutators. */ 6264 if (!gimple_wce_cleanup_eh_only (wce)) 6265 gsi_insert_seq_before_without_update (&iter, 6266 gimple_wce_cleanup (wce), 6267 GSI_SAME_STMT); 6268 gsi_remove (&iter, true); 6269 break; 6270 } 6271 else 6272 { 6273 gtry *gtry; 6274 gimple_seq seq; 6275 enum gimple_try_flags kind; 6276 6277 if (gimple_wce_cleanup_eh_only (wce)) 6278 kind = GIMPLE_TRY_CATCH; 6279 else 6280 kind = GIMPLE_TRY_FINALLY; 6281 seq = gsi_split_seq_after (iter); 6282 6283 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind); 6284 /* Do not use gsi_replace here, as it may scan operands. 6285 We want to do a simple structural modification only. */ 6286 gsi_set_stmt (&iter, gtry); 6287 iter = gsi_start (gtry->eval); 6288 } 6289 } 6290 else 6291 gsi_next (&iter); 6292 } 6293 6294 gimplify_seq_add_seq (pre_p, body_sequence); 6295 if (temp) 6296 { 6297 *expr_p = temp; 6298 return GS_OK; 6299 } 6300 else 6301 { 6302 *expr_p = NULL; 6303 return GS_ALL_DONE; 6304 } 6305 } 6306 6307 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP 6308 is the cleanup action required. EH_ONLY is true if the cleanup should 6309 only be executed if an exception is thrown, not on normal exit. 6310 If FORCE_UNCOND is true perform the cleanup unconditionally; this is 6311 only valid for clobbers. */ 6312 6313 static void 6314 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p, 6315 bool force_uncond = false) 6316 { 6317 gimple *wce; 6318 gimple_seq cleanup_stmts = NULL; 6319 6320 /* Errors can result in improperly nested cleanups. Which results in 6321 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */ 6322 if (seen_error ()) 6323 return; 6324 6325 if (gimple_conditional_context ()) 6326 { 6327 /* If we're in a conditional context, this is more complex. We only 6328 want to run the cleanup if we actually ran the initialization that 6329 necessitates it, but we want to run it after the end of the 6330 conditional context. So we wrap the try/finally around the 6331 condition and use a flag to determine whether or not to actually 6332 run the destructor. Thus 6333 6334 test ? 
f(A()) : 0 6335 6336 becomes (approximately) 6337 6338 flag = 0; 6339 try { 6340 if (test) { A::A(temp); flag = 1; val = f(temp); } 6341 else { val = 0; } 6342 } finally { 6343 if (flag) A::~A(temp); 6344 } 6345 val 6346 */ 6347 if (force_uncond) 6348 { 6349 gimplify_stmt (&cleanup, &cleanup_stmts); 6350 wce = gimple_build_wce (cleanup_stmts); 6351 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce); 6352 } 6353 else 6354 { 6355 tree flag = create_tmp_var (boolean_type_node, "cleanup"); 6356 gassign *ffalse = gimple_build_assign (flag, boolean_false_node); 6357 gassign *ftrue = gimple_build_assign (flag, boolean_true_node); 6358 6359 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL); 6360 gimplify_stmt (&cleanup, &cleanup_stmts); 6361 wce = gimple_build_wce (cleanup_stmts); 6362 6363 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse); 6364 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce); 6365 gimplify_seq_add_stmt (pre_p, ftrue); 6366 6367 /* Because of this manipulation, and the EH edges that jump 6368 threading cannot redirect, the temporary (VAR) will appear 6369 to be used uninitialized. Don't warn. */ 6370 TREE_NO_WARNING (var) = 1; 6371 } 6372 } 6373 else 6374 { 6375 gimplify_stmt (&cleanup, &cleanup_stmts); 6376 wce = gimple_build_wce (cleanup_stmts); 6377 gimple_wce_set_cleanup_eh_only (wce, eh_only); 6378 gimplify_seq_add_stmt (pre_p, wce); 6379 } 6380 } 6381 6382 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */ 6383 6384 static enum gimplify_status 6385 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p) 6386 { 6387 tree targ = *expr_p; 6388 tree temp = TARGET_EXPR_SLOT (targ); 6389 tree init = TARGET_EXPR_INITIAL (targ); 6390 enum gimplify_status ret; 6391 6392 bool unpoison_empty_seq = false; 6393 gimple_stmt_iterator unpoison_it; 6394 6395 if (init) 6396 { 6397 tree cleanup = NULL_TREE; 6398 6399 /* TARGET_EXPR temps aren't part of the enclosing block, so add it 6400 to the temps list. Handle also variable length TARGET_EXPRs. */ 6401 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST) 6402 { 6403 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp))) 6404 gimplify_type_sizes (TREE_TYPE (temp), pre_p); 6405 gimplify_vla_decl (temp, pre_p); 6406 } 6407 else 6408 { 6409 /* Save location where we need to place unpoisoning. It's possible 6410 that a variable will be converted to needs_to_live_in_memory. */ 6411 unpoison_it = gsi_last (*pre_p); 6412 unpoison_empty_seq = gsi_end_p (unpoison_it); 6413 6414 gimple_add_tmp_var (temp); 6415 } 6416 6417 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the 6418 expression is supposed to initialize the slot. */ 6419 if (VOID_TYPE_P (TREE_TYPE (init))) 6420 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none); 6421 else 6422 { 6423 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init); 6424 init = init_expr; 6425 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none); 6426 init = NULL; 6427 ggc_free (init_expr); 6428 } 6429 if (ret == GS_ERROR) 6430 { 6431 /* PR c++/28266 Make sure this is expanded only once. */ 6432 TARGET_EXPR_INITIAL (targ) = NULL_TREE; 6433 return GS_ERROR; 6434 } 6435 if (init) 6436 gimplify_and_add (init, pre_p); 6437 6438 /* If needed, push the cleanup for the temp. 
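For a C++ temporary of class type this is typically the destructor call; CLEANUP_EH_ONLY cleanups are pushed immediately, the rest only after the clobber and ASAN poisoning handling below.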
*/ 6439 if (TARGET_EXPR_CLEANUP (targ)) 6440 { 6441 if (CLEANUP_EH_ONLY (targ)) 6442 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ), 6443 CLEANUP_EH_ONLY (targ), pre_p); 6444 else 6445 cleanup = TARGET_EXPR_CLEANUP (targ); 6446 } 6447 6448 /* Add a clobber for the temporary going out of scope, like 6449 gimplify_bind_expr. */ 6450 if (gimplify_ctxp->in_cleanup_point_expr 6451 && needs_to_live_in_memory (temp)) 6452 { 6453 if (flag_stack_reuse == SR_ALL) 6454 { 6455 tree clobber = build_constructor (TREE_TYPE (temp), 6456 NULL); 6457 TREE_THIS_VOLATILE (clobber) = true; 6458 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber); 6459 gimple_push_cleanup (temp, clobber, false, pre_p, true); 6460 } 6461 if (asan_poisoned_variables && dbg_cnt (asan_use_after_scope) 6462 && !gimplify_omp_ctxp) 6463 { 6464 tree asan_cleanup = build_asan_poison_call_expr (temp); 6465 if (asan_cleanup) 6466 { 6467 if (unpoison_empty_seq) 6468 unpoison_it = gsi_start (*pre_p); 6469 6470 asan_poison_variable (temp, false, &unpoison_it, 6471 unpoison_empty_seq); 6472 gimple_push_cleanup (temp, asan_cleanup, false, pre_p); 6473 } 6474 } 6475 } 6476 if (cleanup) 6477 gimple_push_cleanup (temp, cleanup, false, pre_p); 6478 6479 /* Only expand this once. */ 6480 TREE_OPERAND (targ, 3) = init; 6481 TARGET_EXPR_INITIAL (targ) = NULL_TREE; 6482 } 6483 else 6484 /* We should have expanded this before. */ 6485 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp)); 6486 6487 *expr_p = temp; 6488 return GS_OK; 6489 } 6490 6491 /* Gimplification of expression trees. */ 6492 6493 /* Gimplify an expression which appears at statement context. The 6494 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is 6495 NULL, a new sequence is allocated. 6496 6497 Return true if we actually added a statement to the queue. */ 6498 6499 bool 6500 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p) 6501 { 6502 gimple_seq_node last; 6503 6504 last = gimple_seq_last (*seq_p); 6505 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none); 6506 return last != gimple_seq_last (*seq_p); 6507 } 6508 6509 /* Add FIRSTPRIVATE entries for DECL to CTX and the enclosing OpenMP 6510 parallel contexts. If entries already exist, force them to be some flavor 6511 of private. If there is no enclosing parallel, do nothing. */ 6512 6513 void 6514 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl) 6515 { 6516 splay_tree_node n; 6517 6518 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE) 6519 return; 6520 6521 do 6522 { 6523 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl); 6524 if (n != NULL) 6525 { 6526 if (n->value & GOVD_SHARED) 6527 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN); 6528 else if (n->value & GOVD_MAP) 6529 n->value |= GOVD_MAP_TO_ONLY; 6530 else 6531 return; 6532 } 6533 else if ((ctx->region_type & ORT_TARGET) != 0) 6534 { 6535 if (ctx->target_map_scalars_firstprivate) 6536 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE); 6537 else 6538 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY); 6539 } 6540 else if (ctx->region_type != ORT_WORKSHARE 6541 && ctx->region_type != ORT_SIMD 6542 && ctx->region_type != ORT_ACC 6543 && !(ctx->region_type & ORT_TARGET_DATA)) 6544 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE); 6545 6546 ctx = ctx->outer_context; 6547 } 6548 while (ctx); 6549 } 6550 6551 /* Similarly for each of the type sizes of TYPE.
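For a VLA type such as 'int[n]', for instance, this ends up firstprivatizing the temporaries behind the domain bounds as well as TYPE_SIZE and TYPE_SIZE_UNIT.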
*/ 6552 6553 static void 6554 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type) 6555 { 6556 if (type == NULL || type == error_mark_node) 6557 return; 6558 type = TYPE_MAIN_VARIANT (type); 6559 6560 if (ctx->privatized_types->add (type)) 6561 return; 6562 6563 switch (TREE_CODE (type)) 6564 { 6565 case INTEGER_TYPE: 6566 case ENUMERAL_TYPE: 6567 case BOOLEAN_TYPE: 6568 case REAL_TYPE: 6569 case FIXED_POINT_TYPE: 6570 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type)); 6571 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type)); 6572 break; 6573 6574 case ARRAY_TYPE: 6575 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type)); 6576 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type)); 6577 break; 6578 6579 case RECORD_TYPE: 6580 case UNION_TYPE: 6581 case QUAL_UNION_TYPE: 6582 { 6583 tree field; 6584 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field)) 6585 if (TREE_CODE (field) == FIELD_DECL) 6586 { 6587 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field)); 6588 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field)); 6589 } 6590 } 6591 break; 6592 6593 case POINTER_TYPE: 6594 case REFERENCE_TYPE: 6595 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type)); 6596 break; 6597 6598 default: 6599 break; 6600 } 6601 6602 omp_firstprivatize_variable (ctx, TYPE_SIZE (type)); 6603 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type)); 6604 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type); 6605 } 6606 6607 /* Add an entry for DECL in the OMP context CTX with FLAGS. */ 6608 6609 static void 6610 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags) 6611 { 6612 splay_tree_node n; 6613 unsigned int nflags; 6614 tree t; 6615 6616 if (error_operand_p (decl) || ctx->region_type == ORT_NONE) 6617 return; 6618 6619 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means 6620 there are constructors involved somewhere. Exception is a shared clause, 6621 there is nothing privatized in that case. */ 6622 if ((flags & GOVD_SHARED) == 0 6623 && (TREE_ADDRESSABLE (TREE_TYPE (decl)) 6624 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))) 6625 flags |= GOVD_SEEN; 6626 6627 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl); 6628 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0) 6629 { 6630 /* We shouldn't be re-adding the decl with the same data 6631 sharing class. */ 6632 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0); 6633 nflags = n->value | flags; 6634 /* The only combination of data sharing classes we should see is 6635 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits 6636 reduction variables to be used in data sharing clauses. */ 6637 gcc_assert ((ctx->region_type & ORT_ACC) != 0 6638 || ((nflags & GOVD_DATA_SHARE_CLASS) 6639 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE)) 6640 || (flags & GOVD_DATA_SHARE_CLASS) == 0); 6641 n->value = nflags; 6642 return; 6643 } 6644 6645 /* When adding a variable-sized variable, we have to handle all sorts 6646 of additional bits of data: the pointer replacement variable, and 6647 the parameters of the type. */ 6648 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST) 6649 { 6650 /* Add the pointer replacement variable as PRIVATE if the variable 6651 replacement is private, else FIRSTPRIVATE since we'll need the 6652 address of the original variable either for SHARED, or for the 6653 copy into or out of the context. 
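(For a VLA 'int a[n]', whose DECL_VALUE_EXPR is of the form '*a.ptr', it is the artificial pointer 'a.ptr' extracted below that receives this entry — names illustrative.)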
*/ 6654 if (!(flags & GOVD_LOCAL)) 6655 { 6656 if (flags & GOVD_MAP) 6657 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT; 6658 else if (flags & GOVD_PRIVATE) 6659 nflags = GOVD_PRIVATE; 6660 else if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0 6661 && (flags & GOVD_FIRSTPRIVATE)) 6662 nflags = GOVD_PRIVATE | GOVD_EXPLICIT; 6663 else 6664 nflags = GOVD_FIRSTPRIVATE; 6665 nflags |= flags & GOVD_SEEN; 6666 t = DECL_VALUE_EXPR (decl); 6667 gcc_assert (TREE_CODE (t) == INDIRECT_REF); 6668 t = TREE_OPERAND (t, 0); 6669 gcc_assert (DECL_P (t)); 6670 omp_add_variable (ctx, t, nflags); 6671 } 6672 6673 /* Add all of the variable and type parameters (which should have 6674 been gimplified to a formal temporary) as FIRSTPRIVATE. */ 6675 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl)); 6676 omp_firstprivatize_variable (ctx, DECL_SIZE (decl)); 6677 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl)); 6678 6679 /* The variable-sized variable itself is never SHARED, only some form 6680 of PRIVATE. The sharing would take place via the pointer variable 6681 which we remapped above. */ 6682 if (flags & GOVD_SHARED) 6683 flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE 6684 | (flags & (GOVD_SEEN | GOVD_EXPLICIT)); 6685 6686 /* We're going to make use of the TYPE_SIZE_UNIT at least in the 6687 alloca statement we generate for the variable, so make sure it 6688 is available. This isn't automatically needed for the SHARED 6689 case, since we won't be allocating local storage then. 6690 For local variables TYPE_SIZE_UNIT might not be gimplified yet, 6691 in this case omp_notice_variable will be called later 6692 on when it is gimplified. */ 6693 else if (! (flags & (GOVD_LOCAL | GOVD_MAP)) 6694 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl)))) 6695 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true); 6696 } 6697 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0 6698 && lang_hooks.decls.omp_privatize_by_reference (decl)) 6699 { 6700 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl)); 6701 6702 /* Similar to the direct variable sized case above, we'll need the 6703 size of references being privatized. */ 6704 if ((flags & GOVD_SHARED) == 0) 6705 { 6706 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))); 6707 if (DECL_P (t)) 6708 omp_notice_variable (ctx, t, true); 6709 } 6710 } 6711 6712 if (n != NULL) 6713 n->value |= flags; 6714 else 6715 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags); 6716 6717 /* For reductions clauses in OpenACC loop directives, by default create a 6718 copy clause on the enclosing parallel construct for carrying back the 6719 results. */ 6720 if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION)) 6721 { 6722 struct gimplify_omp_ctx *outer_ctx = ctx->outer_context; 6723 while (outer_ctx) 6724 { 6725 n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl); 6726 if (n != NULL) 6727 { 6728 /* Ignore local variables and explicitly declared clauses. */ 6729 if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT)) 6730 break; 6731 else if (outer_ctx->region_type == ORT_ACC_KERNELS) 6732 { 6733 /* According to the OpenACC spec, such a reduction variable 6734 should already have a copy map on a kernels construct, 6735 verify that here. */ 6736 gcc_assert (!(n->value & GOVD_FIRSTPRIVATE) 6737 && (n->value & GOVD_MAP)); 6738 } 6739 else if (outer_ctx->region_type == ORT_ACC_PARALLEL) 6740 { 6741 /* Remove firstprivate and make it a copy map. 
*/ 6742 n->value &= ~GOVD_FIRSTPRIVATE; 6743 n->value |= GOVD_MAP; 6744 } 6745 } 6746 else if (outer_ctx->region_type == ORT_ACC_PARALLEL) 6747 { 6748 splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl, 6749 GOVD_MAP | GOVD_SEEN); 6750 break; 6751 } 6752 outer_ctx = outer_ctx->outer_context; 6753 } 6754 } 6755 } 6756 6757 /* Notice a threadprivate variable DECL used in OMP context CTX. 6758 This just prints out diagnostics about threadprivate variable uses 6759 in untied tasks. If DECL2 is non-NULL, prevent this warning 6760 on that variable. */ 6761 6762 static bool 6763 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl, 6764 tree decl2) 6765 { 6766 splay_tree_node n; 6767 struct gimplify_omp_ctx *octx; 6768 6769 for (octx = ctx; octx; octx = octx->outer_context) 6770 if ((octx->region_type & ORT_TARGET) != 0) 6771 { 6772 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl); 6773 if (n == NULL) 6774 { 6775 error ("threadprivate variable %qE used in target region", 6776 DECL_NAME (decl)); 6777 error_at (octx->location, "enclosing target region"); 6778 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0); 6779 } 6780 if (decl2) 6781 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0); 6782 } 6783 6784 if (ctx->region_type != ORT_UNTIED_TASK) 6785 return false; 6786 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl); 6787 if (n == NULL) 6788 { 6789 error ("threadprivate variable %qE used in untied task", 6790 DECL_NAME (decl)); 6791 error_at (ctx->location, "enclosing task"); 6792 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0); 6793 } 6794 if (decl2) 6795 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0); 6796 return false; 6797 } 6798 6799 /* Return true if global var DECL is device resident. */ 6800 6801 static bool 6802 device_resident_p (tree decl) 6803 { 6804 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl)); 6805 6806 if (!attr) 6807 return false; 6808 6809 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t)) 6810 { 6811 tree c = TREE_VALUE (t); 6812 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT) 6813 return true; 6814 } 6815 6816 return false; 6817 } 6818 6819 /* Return true if DECL has an ACC DECLARE attribute. */ 6820 6821 static bool 6822 is_oacc_declared (tree decl) 6823 { 6824 tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl; 6825 tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t)); 6826 return declared != NULL_TREE; 6827 } 6828 6829 /* Determine outer default flags for DECL mentioned in an OMP region 6830 but not declared in an enclosing clause. 6831 6832 ??? Some compiler-generated variables (like SAVE_EXPRs) could be 6833 remapped firstprivate instead of shared. To some extent this is 6834 addressed in omp_firstprivatize_type_sizes, but not 6835 effectively. 
*/ 6836 6837 static unsigned 6838 omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl, 6839 bool in_code, unsigned flags) 6840 { 6841 enum omp_clause_default_kind default_kind = ctx->default_kind; 6842 enum omp_clause_default_kind kind; 6843 6844 kind = lang_hooks.decls.omp_predetermined_sharing (decl); 6845 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED) 6846 default_kind = kind; 6847 6848 switch (default_kind) 6849 { 6850 case OMP_CLAUSE_DEFAULT_NONE: 6851 { 6852 const char *rtype; 6853 6854 if (ctx->region_type & ORT_PARALLEL) 6855 rtype = "parallel"; 6856 else if (ctx->region_type & ORT_TASK) 6857 rtype = "task"; 6858 else if (ctx->region_type & ORT_TEAMS) 6859 rtype = "teams"; 6860 else 6861 gcc_unreachable (); 6862 6863 error ("%qE not specified in enclosing %qs", 6864 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype); 6865 error_at (ctx->location, "enclosing %qs", rtype); 6866 } 6867 /* FALLTHRU */ 6868 case OMP_CLAUSE_DEFAULT_SHARED: 6869 flags |= GOVD_SHARED; 6870 break; 6871 case OMP_CLAUSE_DEFAULT_PRIVATE: 6872 flags |= GOVD_PRIVATE; 6873 break; 6874 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE: 6875 flags |= GOVD_FIRSTPRIVATE; 6876 break; 6877 case OMP_CLAUSE_DEFAULT_UNSPECIFIED: 6878 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */ 6879 gcc_assert ((ctx->region_type & ORT_TASK) != 0); 6880 if (struct gimplify_omp_ctx *octx = ctx->outer_context) 6881 { 6882 omp_notice_variable (octx, decl, in_code); 6883 for (; octx; octx = octx->outer_context) 6884 { 6885 splay_tree_node n2; 6886 6887 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl); 6888 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0 6889 && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0)) 6890 continue; 6891 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED) 6892 { 6893 flags |= GOVD_FIRSTPRIVATE; 6894 goto found_outer; 6895 } 6896 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0) 6897 { 6898 flags |= GOVD_SHARED; 6899 goto found_outer; 6900 } 6901 } 6902 } 6903 6904 if (TREE_CODE (decl) == PARM_DECL 6905 || (!is_global_var (decl) 6906 && DECL_CONTEXT (decl) == current_function_decl)) 6907 flags |= GOVD_FIRSTPRIVATE; 6908 else 6909 flags |= GOVD_SHARED; 6910 found_outer: 6911 break; 6912 6913 default: 6914 gcc_unreachable (); 6915 } 6916 6917 return flags; 6918 } 6919 6920 6921 /* Determine outer default flags for DECL mentioned in an OACC region 6922 but not declared in an enclosing clause. */ 6923 6924 static unsigned 6925 oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags) 6926 { 6927 const char *rkind; 6928 bool on_device = false; 6929 bool declared = is_oacc_declared (decl); 6930 tree type = TREE_TYPE (decl); 6931 6932 if (lang_hooks.decls.omp_privatize_by_reference (decl)) 6933 type = TREE_TYPE (type); 6934 6935 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0 6936 && is_global_var (decl) 6937 && device_resident_p (decl)) 6938 { 6939 on_device = true; 6940 flags |= GOVD_MAP_TO_ONLY; 6941 } 6942 6943 switch (ctx->region_type) 6944 { 6945 default: 6946 gcc_unreachable (); 6947 6948 case ORT_ACC_KERNELS: 6949 /* Scalars are default 'copy' under kernels, non-scalars are default 6950 'present_or_copy'. */ 6951 flags |= GOVD_MAP; 6952 if (!AGGREGATE_TYPE_P (type)) 6953 flags |= GOVD_MAP_FORCE; 6954 6955 rkind = "kernels"; 6956 break; 6957 6958 case ORT_ACC_PARALLEL: 6959 { 6960 if (on_device || AGGREGATE_TYPE_P (type) || declared) 6961 /* Aggregates default to 'present_or_copy'. 
*/ 6962 flags |= GOVD_MAP; 6963 else 6964 /* Scalars default to 'firstprivate'. */ 6965 flags |= GOVD_FIRSTPRIVATE; 6966 rkind = "parallel"; 6967 } 6968 break; 6969 } 6970 6971 if (DECL_ARTIFICIAL (decl)) 6972 ; /* We can get compiler-generated decls, and should not complain 6973 about them. */ 6974 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE) 6975 { 6976 error ("%qE not specified in enclosing OpenACC %qs construct", 6977 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind); 6978 inform (ctx->location, "enclosing OpenACC %qs construct", rkind); 6979 } 6980 else 6981 gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED); 6982 6983 return flags; 6984 } 6985 6986 /* Record the fact that DECL was used within the OMP context CTX. 6987 IN_CODE is true when real code uses DECL, and false when we should 6988 merely emit default(none) errors. Return true if DECL is going to 6989 be remapped and thus DECL shouldn't be gimplified into its 6990 DECL_VALUE_EXPR (if any). */ 6991 6992 static bool 6993 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code) 6994 { 6995 splay_tree_node n; 6996 unsigned flags = in_code ? GOVD_SEEN : 0; 6997 bool ret = false, shared; 6998 6999 if (error_operand_p (decl)) 7000 return false; 7001 7002 if (ctx->region_type == ORT_NONE) 7003 return lang_hooks.decls.omp_disregard_value_expr (decl, false); 7004 7005 if (is_global_var (decl)) 7006 { 7007 /* Threadprivate variables are predetermined. */ 7008 if (DECL_THREAD_LOCAL_P (decl)) 7009 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE); 7010 7011 if (DECL_HAS_VALUE_EXPR_P (decl)) 7012 { 7013 tree value = get_base_address (DECL_VALUE_EXPR (decl)); 7014 7015 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value)) 7016 return omp_notice_threadprivate_variable (ctx, decl, value); 7017 } 7018 7019 if (gimplify_omp_ctxp->outer_context == NULL 7020 && VAR_P (decl) 7021 && oacc_get_fn_attrib (current_function_decl)) 7022 { 7023 location_t loc = DECL_SOURCE_LOCATION (decl); 7024 7025 if (lookup_attribute ("omp declare target link", 7026 DECL_ATTRIBUTES (decl))) 7027 { 7028 error_at (loc, 7029 "%qE with %<link%> clause used in %<routine%> function", 7030 DECL_NAME (decl)); 7031 return false; 7032 } 7033 else if (!lookup_attribute ("omp declare target", 7034 DECL_ATTRIBUTES (decl))) 7035 { 7036 error_at (loc, 7037 "%qE requires a %<declare%> directive for use " 7038 "in a %<routine%> function", DECL_NAME (decl)); 7039 return false; 7040 } 7041 } 7042 } 7043 7044 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl); 7045 if ((ctx->region_type & ORT_TARGET) != 0) 7046 { 7047 ret = lang_hooks.decls.omp_disregard_value_expr (decl, true); 7048 if (n == NULL) 7049 { 7050 unsigned nflags = flags; 7051 if (ctx->target_map_pointers_as_0len_arrays 7052 || ctx->target_map_scalars_firstprivate) 7053 { 7054 bool is_declare_target = false; 7055 bool is_scalar = false; 7056 if (is_global_var (decl) 7057 && varpool_node::get_create (decl)->offloadable) 7058 { 7059 struct gimplify_omp_ctx *octx; 7060 for (octx = ctx->outer_context; 7061 octx; octx = octx->outer_context) 7062 { 7063 n = splay_tree_lookup (octx->variables, 7064 (splay_tree_key)decl); 7065 if (n 7066 && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED 7067 && (n->value & GOVD_DATA_SHARE_CLASS) != 0) 7068 break; 7069 } 7070 is_declare_target = octx == NULL; 7071 } 7072 if (!is_declare_target && ctx->target_map_scalars_firstprivate) 7073 is_scalar = lang_hooks.decls.omp_scalar_p (decl); 7074 if 
(is_declare_target) 7075 ; 7076 else if (ctx->target_map_pointers_as_0len_arrays 7077 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE 7078 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE 7079 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) 7080 == POINTER_TYPE))) 7081 nflags |= GOVD_MAP | GOVD_MAP_0LEN_ARRAY; 7082 else if (is_scalar) 7083 nflags |= GOVD_FIRSTPRIVATE; 7084 } 7085 7086 struct gimplify_omp_ctx *octx = ctx->outer_context; 7087 if ((ctx->region_type & ORT_ACC) && octx) 7088 { 7089 /* Look in outer OpenACC contexts, to see if there's a 7090 data attribute for this variable. */ 7091 omp_notice_variable (octx, decl, in_code); 7092 7093 for (; octx; octx = octx->outer_context) 7094 { 7095 if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET))) 7096 break; 7097 splay_tree_node n2 7098 = splay_tree_lookup (octx->variables, 7099 (splay_tree_key) decl); 7100 if (n2) 7101 { 7102 if (octx->region_type == ORT_ACC_HOST_DATA) 7103 error ("variable %qE declared in enclosing " 7104 "%<host_data%> region", DECL_NAME (decl)); 7105 nflags |= GOVD_MAP; 7106 if (octx->region_type == ORT_ACC_DATA 7107 && (n2->value & GOVD_MAP_0LEN_ARRAY)) 7108 nflags |= GOVD_MAP_0LEN_ARRAY; 7109 goto found_outer; 7110 } 7111 } 7112 } 7113 7114 { 7115 tree type = TREE_TYPE (decl); 7116 7117 if (nflags == flags 7118 && gimplify_omp_ctxp->target_firstprivatize_array_bases 7119 && lang_hooks.decls.omp_privatize_by_reference (decl)) 7120 type = TREE_TYPE (type); 7121 if (nflags == flags 7122 && !lang_hooks.types.omp_mappable_type (type)) 7123 { 7124 error ("%qD referenced in target region does not have " 7125 "a mappable type", decl); 7126 nflags |= GOVD_MAP | GOVD_EXPLICIT; 7127 } 7128 else if (nflags == flags) 7129 { 7130 if ((ctx->region_type & ORT_ACC) != 0) 7131 nflags = oacc_default_clause (ctx, decl, flags); 7132 else 7133 nflags |= GOVD_MAP; 7134 } 7135 } 7136 found_outer: 7137 omp_add_variable (ctx, decl, nflags); 7138 } 7139 else 7140 { 7141 /* If nothing changed, there's nothing left to do. 
*/ 7142 if ((n->value & flags) == flags) 7143 return ret; 7144 flags |= n->value; 7145 n->value = flags; 7146 } 7147 goto do_outer; 7148 } 7149 7150 if (n == NULL) 7151 { 7152 if (ctx->region_type == ORT_WORKSHARE 7153 || ctx->region_type == ORT_SIMD 7154 || ctx->region_type == ORT_ACC 7155 || (ctx->region_type & ORT_TARGET_DATA) != 0) 7156 goto do_outer; 7157 7158 flags = omp_default_clause (ctx, decl, in_code, flags); 7159 7160 if ((flags & GOVD_PRIVATE) 7161 && lang_hooks.decls.omp_private_outer_ref (decl)) 7162 flags |= GOVD_PRIVATE_OUTER_REF; 7163 7164 omp_add_variable (ctx, decl, flags); 7165 7166 shared = (flags & GOVD_SHARED) != 0; 7167 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared); 7168 goto do_outer; 7169 } 7170 7171 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0 7172 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN 7173 && DECL_SIZE (decl)) 7174 { 7175 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST) 7176 { 7177 splay_tree_node n2; 7178 tree t = DECL_VALUE_EXPR (decl); 7179 gcc_assert (TREE_CODE (t) == INDIRECT_REF); 7180 t = TREE_OPERAND (t, 0); 7181 gcc_assert (DECL_P (t)); 7182 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t); 7183 n2->value |= GOVD_SEEN; 7184 } 7185 else if (lang_hooks.decls.omp_privatize_by_reference (decl) 7186 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))) 7187 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))) 7188 != INTEGER_CST)) 7189 { 7190 splay_tree_node n2; 7191 tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))); 7192 gcc_assert (DECL_P (t)); 7193 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t); 7194 if (n2) 7195 omp_notice_variable (ctx, t, true); 7196 } 7197 } 7198 7199 shared = ((flags | n->value) & GOVD_SHARED) != 0; 7200 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared); 7201 7202 /* If nothing changed, there's nothing left to do. */ 7203 if ((n->value & flags) == flags) 7204 return ret; 7205 flags |= n->value; 7206 n->value = flags; 7207 7208 do_outer: 7209 /* If the variable is private in the current context, then we don't 7210 need to propagate anything to an outer context. */ 7211 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF)) 7212 return ret; 7213 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER)) 7214 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER)) 7215 return ret; 7216 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE 7217 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER)) 7218 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER)) 7219 return ret; 7220 if (ctx->outer_context 7221 && omp_notice_variable (ctx->outer_context, decl, in_code)) 7222 return true; 7223 return ret; 7224 } 7225 7226 /* Verify that DECL is private within CTX. If there's specific information 7227 to the contrary in the innermost scope, generate an error. 
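For instance, an OMP_FOR iteration variable that an explicit clause made firstprivate, reduction or lastprivate is diagnosed here, and a shared one is forced private after the error.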
*/

static bool
omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
{
  splay_tree_node n;

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n != NULL)
    {
      if (n->value & GOVD_SHARED)
        {
          if (ctx == gimplify_omp_ctxp)
            {
              if (simd)
                error ("iteration variable %qE is predetermined linear",
                       DECL_NAME (decl));
              else
                error ("iteration variable %qE should be private",
                       DECL_NAME (decl));
              n->value = GOVD_PRIVATE;
              return true;
            }
          else
            return false;
        }
      else if ((n->value & GOVD_EXPLICIT) != 0
               && (ctx == gimplify_omp_ctxp
                   || (ctx->region_type == ORT_COMBINED_PARALLEL
                       && gimplify_omp_ctxp->outer_context == ctx)))
        {
          if ((n->value & GOVD_FIRSTPRIVATE) != 0)
            error ("iteration variable %qE should not be firstprivate",
                   DECL_NAME (decl));
          else if ((n->value & GOVD_REDUCTION) != 0)
            error ("iteration variable %qE should not be reduction",
                   DECL_NAME (decl));
          else if (simd == 0 && (n->value & GOVD_LINEAR) != 0)
            error ("iteration variable %qE should not be linear",
                   DECL_NAME (decl));
          else if (simd == 1 && (n->value & GOVD_LASTPRIVATE) != 0)
            error ("iteration variable %qE should not be lastprivate",
                   DECL_NAME (decl));
          else if (simd && (n->value & GOVD_PRIVATE) != 0)
            error ("iteration variable %qE should not be private",
                   DECL_NAME (decl));
          else if (simd == 2 && (n->value & GOVD_LINEAR) != 0)
            error ("iteration variable %qE is predetermined linear",
                   DECL_NAME (decl));
        }
      return (ctx == gimplify_omp_ctxp
              || (ctx->region_type == ORT_COMBINED_PARALLEL
                  && gimplify_omp_ctxp->outer_context == ctx));
    }

  if (ctx->region_type != ORT_WORKSHARE
      && ctx->region_type != ORT_SIMD
      && ctx->region_type != ORT_ACC)
    return false;
  else if (ctx->outer_context)
    return omp_is_private (ctx->outer_context, decl, simd);
  return false;
}

/* Return true if DECL is private within a parallel region
   that binds to the current construct's context or in parallel
   region's REDUCTION clause.  */

static bool
omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
{
  splay_tree_node n;

  do
    {
      ctx = ctx->outer_context;
      if (ctx == NULL)
        {
          if (is_global_var (decl))
            return false;

          /* References might be private, but might be shared too;
             when checking for copyprivate, assume they might be
             private, otherwise assume they might be shared.  */
          if (copyprivate)
            return true;

          if (lang_hooks.decls.omp_privatize_by_reference (decl))
            return false;

          /* Treat C++ privatized non-static data members outside
             of the privatization the same.  */
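          /* Such a privatized member is represented by an artificial
             VAR_DECL standing in for this->member (see
             omp_member_access_dummy_var); like a reference, it might or
             might not end up shared, so it is likewise not reported as
             known-private here.  */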
          if (omp_member_access_dummy_var (decl))
            return false;

          return true;
        }

      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);

      if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
          && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
        continue;

      if (n != NULL)
        {
          if ((n->value & GOVD_LOCAL) != 0
              && omp_member_access_dummy_var (decl))
            return false;
          return (n->value & GOVD_SHARED) == 0;
        }
    }
  while (ctx->region_type == ORT_WORKSHARE
         || ctx->region_type == ORT_SIMD
         || ctx->region_type == ORT_ACC);
  return false;
}

/* Callback for walk_tree to find a DECL_EXPR for the given DECL.  */

static tree
find_decl_expr (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;

  /* If this is the DECL_EXPR for the DECL we are looking for, return it.  */
  if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
    return t;

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Scan the OMP clauses in *LIST_P, installing mappings into a new
   omp context and, where required, into enclosing omp contexts.  */

static void
gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
                           enum omp_region_type region_type,
                           enum tree_code code)
{
  struct gimplify_omp_ctx *ctx, *outer_ctx;
  tree c;
  hash_map<tree, tree> *struct_map_to_clause = NULL;
  tree *prev_list_p = NULL;

  ctx = new_omp_context (region_type);
  outer_ctx = ctx->outer_context;
  if (code == OMP_TARGET)
    {
      if (!lang_GNU_Fortran ())
        ctx->target_map_pointers_as_0len_arrays = true;
      ctx->target_map_scalars_firstprivate = true;
    }
  if (!lang_GNU_Fortran ())
    switch (code)
      {
      case OMP_TARGET:
      case OMP_TARGET_DATA:
      case OMP_TARGET_ENTER_DATA:
      case OMP_TARGET_EXIT_DATA:
      case OACC_DECLARE:
      case OACC_HOST_DATA:
        ctx->target_firstprivatize_array_bases = true;
      default:
        break;
      }

  while ((c = *list_p) != NULL)
    {
      bool remove = false;
      bool notice_outer = true;
      const char *check_non_private = NULL;
      unsigned int flags;
      tree decl;

      switch (OMP_CLAUSE_CODE (c))
        {
        case OMP_CLAUSE_PRIVATE:
          flags = GOVD_PRIVATE | GOVD_EXPLICIT;
          if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
            {
              flags |= GOVD_PRIVATE_OUTER_REF;
              OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
            }
          else
            notice_outer = false;
          goto do_add;
        case OMP_CLAUSE_SHARED:
          flags = GOVD_SHARED | GOVD_EXPLICIT;
          goto do_add;
        case OMP_CLAUSE_FIRSTPRIVATE:
          flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
          check_non_private = "firstprivate";
          goto do_add;
        case OMP_CLAUSE_LASTPRIVATE:
          flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
          check_non_private = "lastprivate";
          decl = OMP_CLAUSE_DECL (c);
          if (error_operand_p (decl))
            goto do_add;
          else if (outer_ctx
                   && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
                       || outer_ctx->region_type == ORT_COMBINED_TEAMS)
                   && splay_tree_lookup (outer_ctx->variables,
                                         (splay_tree_key) decl) == NULL)
            {
              omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN);
              if (outer_ctx->outer_context)
                omp_notice_variable (outer_ctx->outer_context, decl, true);
            }
          else if (outer_ctx
                   && (outer_ctx->region_type & ORT_TASK) != 0
                   &&
outer_ctx->combined_loop 7442 && splay_tree_lookup (outer_ctx->variables, 7443 (splay_tree_key) decl) == NULL) 7444 { 7445 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN); 7446 if (outer_ctx->outer_context) 7447 omp_notice_variable (outer_ctx->outer_context, decl, true); 7448 } 7449 else if (outer_ctx 7450 && (outer_ctx->region_type == ORT_WORKSHARE 7451 || outer_ctx->region_type == ORT_ACC) 7452 && outer_ctx->combined_loop 7453 && splay_tree_lookup (outer_ctx->variables, 7454 (splay_tree_key) decl) == NULL 7455 && !omp_check_private (outer_ctx, decl, false)) 7456 { 7457 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN); 7458 if (outer_ctx->outer_context 7459 && (outer_ctx->outer_context->region_type 7460 == ORT_COMBINED_PARALLEL) 7461 && splay_tree_lookup (outer_ctx->outer_context->variables, 7462 (splay_tree_key) decl) == NULL) 7463 { 7464 struct gimplify_omp_ctx *octx = outer_ctx->outer_context; 7465 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN); 7466 if (octx->outer_context) 7467 { 7468 octx = octx->outer_context; 7469 if (octx->region_type == ORT_WORKSHARE 7470 && octx->combined_loop 7471 && splay_tree_lookup (octx->variables, 7472 (splay_tree_key) decl) == NULL 7473 && !omp_check_private (octx, decl, false)) 7474 { 7475 omp_add_variable (octx, decl, 7476 GOVD_LASTPRIVATE | GOVD_SEEN); 7477 octx = octx->outer_context; 7478 if (octx 7479 && octx->region_type == ORT_COMBINED_TEAMS 7480 && (splay_tree_lookup (octx->variables, 7481 (splay_tree_key) decl) 7482 == NULL)) 7483 { 7484 omp_add_variable (octx, decl, 7485 GOVD_SHARED | GOVD_SEEN); 7486 octx = octx->outer_context; 7487 } 7488 } 7489 if (octx) 7490 omp_notice_variable (octx, decl, true); 7491 } 7492 } 7493 else if (outer_ctx->outer_context) 7494 omp_notice_variable (outer_ctx->outer_context, decl, true); 7495 } 7496 goto do_add; 7497 case OMP_CLAUSE_REDUCTION: 7498 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT; 7499 /* OpenACC permits reductions on private variables. 
*/ 7500 if (!(region_type & ORT_ACC)) 7501 check_non_private = "reduction"; 7502 decl = OMP_CLAUSE_DECL (c); 7503 if (TREE_CODE (decl) == MEM_REF) 7504 { 7505 tree type = TREE_TYPE (decl); 7506 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p, 7507 NULL, is_gimple_val, fb_rvalue, false) 7508 == GS_ERROR) 7509 { 7510 remove = true; 7511 break; 7512 } 7513 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type)); 7514 if (DECL_P (v)) 7515 { 7516 omp_firstprivatize_variable (ctx, v); 7517 omp_notice_variable (ctx, v, true); 7518 } 7519 decl = TREE_OPERAND (decl, 0); 7520 if (TREE_CODE (decl) == POINTER_PLUS_EXPR) 7521 { 7522 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p, 7523 NULL, is_gimple_val, fb_rvalue, false) 7524 == GS_ERROR) 7525 { 7526 remove = true; 7527 break; 7528 } 7529 v = TREE_OPERAND (decl, 1); 7530 if (DECL_P (v)) 7531 { 7532 omp_firstprivatize_variable (ctx, v); 7533 omp_notice_variable (ctx, v, true); 7534 } 7535 decl = TREE_OPERAND (decl, 0); 7536 } 7537 if (TREE_CODE (decl) == ADDR_EXPR 7538 || TREE_CODE (decl) == INDIRECT_REF) 7539 decl = TREE_OPERAND (decl, 0); 7540 } 7541 goto do_add_decl; 7542 case OMP_CLAUSE_LINEAR: 7543 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL, 7544 is_gimple_val, fb_rvalue) == GS_ERROR) 7545 { 7546 remove = true; 7547 break; 7548 } 7549 else 7550 { 7551 if (code == OMP_SIMD 7552 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c)) 7553 { 7554 struct gimplify_omp_ctx *octx = outer_ctx; 7555 if (octx 7556 && octx->region_type == ORT_WORKSHARE 7557 && octx->combined_loop 7558 && !octx->distribute) 7559 { 7560 if (octx->outer_context 7561 && (octx->outer_context->region_type 7562 == ORT_COMBINED_PARALLEL)) 7563 octx = octx->outer_context->outer_context; 7564 else 7565 octx = octx->outer_context; 7566 } 7567 if (octx 7568 && octx->region_type == ORT_WORKSHARE 7569 && octx->combined_loop 7570 && octx->distribute) 7571 { 7572 error_at (OMP_CLAUSE_LOCATION (c), 7573 "%<linear%> clause for variable other than " 7574 "loop iterator specified on construct " 7575 "combined with %<distribute%>"); 7576 remove = true; 7577 break; 7578 } 7579 } 7580 /* For combined #pragma omp parallel for simd, need to put 7581 lastprivate and perhaps firstprivate too on the 7582 parallel. Similarly for #pragma omp for simd. 
*/ 7583 struct gimplify_omp_ctx *octx = outer_ctx; 7584 decl = NULL_TREE; 7585 do 7586 { 7587 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c) 7588 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c)) 7589 break; 7590 decl = OMP_CLAUSE_DECL (c); 7591 if (error_operand_p (decl)) 7592 { 7593 decl = NULL_TREE; 7594 break; 7595 } 7596 flags = GOVD_SEEN; 7597 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)) 7598 flags |= GOVD_FIRSTPRIVATE; 7599 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c)) 7600 flags |= GOVD_LASTPRIVATE; 7601 if (octx 7602 && octx->region_type == ORT_WORKSHARE 7603 && octx->combined_loop) 7604 { 7605 if (octx->outer_context 7606 && (octx->outer_context->region_type 7607 == ORT_COMBINED_PARALLEL)) 7608 octx = octx->outer_context; 7609 else if (omp_check_private (octx, decl, false)) 7610 break; 7611 } 7612 else if (octx 7613 && (octx->region_type & ORT_TASK) != 0 7614 && octx->combined_loop) 7615 ; 7616 else if (octx 7617 && octx->region_type == ORT_COMBINED_PARALLEL 7618 && ctx->region_type == ORT_WORKSHARE 7619 && octx == outer_ctx) 7620 flags = GOVD_SEEN | GOVD_SHARED; 7621 else if (octx 7622 && octx->region_type == ORT_COMBINED_TEAMS) 7623 flags = GOVD_SEEN | GOVD_SHARED; 7624 else if (octx 7625 && octx->region_type == ORT_COMBINED_TARGET) 7626 { 7627 flags &= ~GOVD_LASTPRIVATE; 7628 if (flags == GOVD_SEEN) 7629 break; 7630 } 7631 else 7632 break; 7633 splay_tree_node on 7634 = splay_tree_lookup (octx->variables, 7635 (splay_tree_key) decl); 7636 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0) 7637 { 7638 octx = NULL; 7639 break; 7640 } 7641 omp_add_variable (octx, decl, flags); 7642 if (octx->outer_context == NULL) 7643 break; 7644 octx = octx->outer_context; 7645 } 7646 while (1); 7647 if (octx 7648 && decl 7649 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c) 7650 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))) 7651 omp_notice_variable (octx, decl, true); 7652 } 7653 flags = GOVD_LINEAR | GOVD_EXPLICIT; 7654 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c) 7655 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c)) 7656 { 7657 notice_outer = false; 7658 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER; 7659 } 7660 goto do_add; 7661 7662 case OMP_CLAUSE_MAP: 7663 decl = OMP_CLAUSE_DECL (c); 7664 if (error_operand_p (decl)) 7665 remove = true; 7666 switch (code) 7667 { 7668 case OMP_TARGET: 7669 break; 7670 case OACC_DATA: 7671 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE) 7672 break; 7673 /* FALLTHRU */ 7674 case OMP_TARGET_DATA: 7675 case OMP_TARGET_ENTER_DATA: 7676 case OMP_TARGET_EXIT_DATA: 7677 case OACC_ENTER_DATA: 7678 case OACC_EXIT_DATA: 7679 case OACC_HOST_DATA: 7680 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER 7681 || (OMP_CLAUSE_MAP_KIND (c) 7682 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)) 7683 /* For target {,enter ,exit }data only the array slice is 7684 mapped, but not the pointer to it. */ 7685 remove = true; 7686 break; 7687 default: 7688 break; 7689 } 7690 if (remove) 7691 break; 7692 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC)) 7693 { 7694 struct gimplify_omp_ctx *octx; 7695 for (octx = outer_ctx; octx; octx = octx->outer_context) 7696 { 7697 if (octx->region_type != ORT_ACC_HOST_DATA) 7698 break; 7699 splay_tree_node n2 7700 = splay_tree_lookup (octx->variables, 7701 (splay_tree_key) decl); 7702 if (n2) 7703 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE " 7704 "declared in enclosing %<host_data%> region", 7705 DECL_NAME (decl)); 7706 } 7707 } 7708 if (OMP_CLAUSE_SIZE (c) == NULL_TREE) 7709 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? 
DECL_SIZE_UNIT (decl) 7710 : TYPE_SIZE_UNIT (TREE_TYPE (decl)); 7711 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p, 7712 NULL, is_gimple_val, fb_rvalue) == GS_ERROR) 7713 { 7714 remove = true; 7715 break; 7716 } 7717 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER 7718 || (OMP_CLAUSE_MAP_KIND (c) 7719 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)) 7720 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST) 7721 { 7722 OMP_CLAUSE_SIZE (c) 7723 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL, 7724 false); 7725 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c), 7726 GOVD_FIRSTPRIVATE | GOVD_SEEN); 7727 } 7728 if (!DECL_P (decl)) 7729 { 7730 tree d = decl, *pd; 7731 if (TREE_CODE (d) == ARRAY_REF) 7732 { 7733 while (TREE_CODE (d) == ARRAY_REF) 7734 d = TREE_OPERAND (d, 0); 7735 if (TREE_CODE (d) == COMPONENT_REF 7736 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE) 7737 decl = d; 7738 } 7739 pd = &OMP_CLAUSE_DECL (c); 7740 if (d == decl 7741 && TREE_CODE (decl) == INDIRECT_REF 7742 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF 7743 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0))) 7744 == REFERENCE_TYPE)) 7745 { 7746 pd = &TREE_OPERAND (decl, 0); 7747 decl = TREE_OPERAND (decl, 0); 7748 } 7749 if (TREE_CODE (decl) == COMPONENT_REF) 7750 { 7751 while (TREE_CODE (decl) == COMPONENT_REF) 7752 decl = TREE_OPERAND (decl, 0); 7753 if (TREE_CODE (decl) == INDIRECT_REF 7754 && DECL_P (TREE_OPERAND (decl, 0)) 7755 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0))) 7756 == REFERENCE_TYPE)) 7757 decl = TREE_OPERAND (decl, 0); 7758 } 7759 if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue, fb_lvalue) 7760 == GS_ERROR) 7761 { 7762 remove = true; 7763 break; 7764 } 7765 if (DECL_P (decl)) 7766 { 7767 if (error_operand_p (decl)) 7768 { 7769 remove = true; 7770 break; 7771 } 7772 7773 tree stype = TREE_TYPE (decl); 7774 if (TREE_CODE (stype) == REFERENCE_TYPE) 7775 stype = TREE_TYPE (stype); 7776 if (TYPE_SIZE_UNIT (stype) == NULL 7777 || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST) 7778 { 7779 error_at (OMP_CLAUSE_LOCATION (c), 7780 "mapping field %qE of variable length " 7781 "structure", OMP_CLAUSE_DECL (c)); 7782 remove = true; 7783 break; 7784 } 7785 7786 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER) 7787 { 7788 /* Error recovery. 
*/ 7789 if (prev_list_p == NULL) 7790 { 7791 remove = true; 7792 break; 7793 } 7794 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c) 7795 { 7796 tree ch = OMP_CLAUSE_CHAIN (*prev_list_p); 7797 if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c) 7798 { 7799 remove = true; 7800 break; 7801 } 7802 } 7803 } 7804 7805 tree offset; 7806 HOST_WIDE_INT bitsize, bitpos; 7807 machine_mode mode; 7808 int unsignedp, reversep, volatilep = 0; 7809 tree base = OMP_CLAUSE_DECL (c); 7810 while (TREE_CODE (base) == ARRAY_REF) 7811 base = TREE_OPERAND (base, 0); 7812 if (TREE_CODE (base) == INDIRECT_REF) 7813 base = TREE_OPERAND (base, 0); 7814 base = get_inner_reference (base, &bitsize, &bitpos, &offset, 7815 &mode, &unsignedp, &reversep, 7816 &volatilep); 7817 tree orig_base = base; 7818 if ((TREE_CODE (base) == INDIRECT_REF 7819 || (TREE_CODE (base) == MEM_REF 7820 && integer_zerop (TREE_OPERAND (base, 1)))) 7821 && DECL_P (TREE_OPERAND (base, 0)) 7822 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0))) 7823 == REFERENCE_TYPE)) 7824 base = TREE_OPERAND (base, 0); 7825 gcc_assert (base == decl 7826 && (offset == NULL_TREE 7827 || TREE_CODE (offset) == INTEGER_CST)); 7828 7829 splay_tree_node n 7830 = splay_tree_lookup (ctx->variables, (splay_tree_key)decl); 7831 bool ptr = (OMP_CLAUSE_MAP_KIND (c) 7832 == GOMP_MAP_ALWAYS_POINTER); 7833 if (n == NULL || (n->value & GOVD_MAP) == 0) 7834 { 7835 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c), 7836 OMP_CLAUSE_MAP); 7837 OMP_CLAUSE_SET_MAP_KIND (l, GOMP_MAP_STRUCT); 7838 if (orig_base != base) 7839 OMP_CLAUSE_DECL (l) = unshare_expr (orig_base); 7840 else 7841 OMP_CLAUSE_DECL (l) = decl; 7842 OMP_CLAUSE_SIZE (l) = size_int (1); 7843 if (struct_map_to_clause == NULL) 7844 struct_map_to_clause = new hash_map<tree, tree>; 7845 struct_map_to_clause->put (decl, l); 7846 if (ptr) 7847 { 7848 enum gomp_map_kind mkind 7849 = code == OMP_TARGET_EXIT_DATA 7850 ? 
GOMP_MAP_RELEASE : GOMP_MAP_ALLOC; 7851 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c), 7852 OMP_CLAUSE_MAP); 7853 OMP_CLAUSE_SET_MAP_KIND (c2, mkind); 7854 OMP_CLAUSE_DECL (c2) 7855 = unshare_expr (OMP_CLAUSE_DECL (c)); 7856 OMP_CLAUSE_CHAIN (c2) = *prev_list_p; 7857 OMP_CLAUSE_SIZE (c2) 7858 = TYPE_SIZE_UNIT (ptr_type_node); 7859 OMP_CLAUSE_CHAIN (l) = c2; 7860 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c) 7861 { 7862 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p); 7863 tree c3 7864 = build_omp_clause (OMP_CLAUSE_LOCATION (c), 7865 OMP_CLAUSE_MAP); 7866 OMP_CLAUSE_SET_MAP_KIND (c3, mkind); 7867 OMP_CLAUSE_DECL (c3) 7868 = unshare_expr (OMP_CLAUSE_DECL (c4)); 7869 OMP_CLAUSE_SIZE (c3) 7870 = TYPE_SIZE_UNIT (ptr_type_node); 7871 OMP_CLAUSE_CHAIN (c3) = *prev_list_p; 7872 OMP_CLAUSE_CHAIN (c2) = c3; 7873 } 7874 *prev_list_p = l; 7875 prev_list_p = NULL; 7876 } 7877 else 7878 { 7879 OMP_CLAUSE_CHAIN (l) = c; 7880 *list_p = l; 7881 list_p = &OMP_CLAUSE_CHAIN (l); 7882 } 7883 if (orig_base != base && code == OMP_TARGET) 7884 { 7885 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c), 7886 OMP_CLAUSE_MAP); 7887 enum gomp_map_kind mkind 7888 = GOMP_MAP_FIRSTPRIVATE_REFERENCE; 7889 OMP_CLAUSE_SET_MAP_KIND (c2, mkind); 7890 OMP_CLAUSE_DECL (c2) = decl; 7891 OMP_CLAUSE_SIZE (c2) = size_zero_node; 7892 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l); 7893 OMP_CLAUSE_CHAIN (l) = c2; 7894 } 7895 flags = GOVD_MAP | GOVD_EXPLICIT; 7896 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr) 7897 flags |= GOVD_SEEN; 7898 goto do_add_decl; 7899 } 7900 else 7901 { 7902 tree *osc = struct_map_to_clause->get (decl); 7903 tree *sc = NULL, *scp = NULL; 7904 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr) 7905 n->value |= GOVD_SEEN; 7906 offset_int o1, o2; 7907 if (offset) 7908 o1 = wi::to_offset (offset); 7909 else 7910 o1 = 0; 7911 if (bitpos) 7912 o1 = o1 + bitpos / BITS_PER_UNIT; 7913 sc = &OMP_CLAUSE_CHAIN (*osc); 7914 if (*sc != c 7915 && (OMP_CLAUSE_MAP_KIND (*sc) 7916 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)) 7917 sc = &OMP_CLAUSE_CHAIN (*sc); 7918 for (; *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc)) 7919 if (ptr && sc == prev_list_p) 7920 break; 7921 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc)) 7922 != COMPONENT_REF 7923 && (TREE_CODE (OMP_CLAUSE_DECL (*sc)) 7924 != INDIRECT_REF) 7925 && (TREE_CODE (OMP_CLAUSE_DECL (*sc)) 7926 != ARRAY_REF)) 7927 break; 7928 else 7929 { 7930 tree offset2; 7931 HOST_WIDE_INT bitsize2, bitpos2; 7932 base = OMP_CLAUSE_DECL (*sc); 7933 if (TREE_CODE (base) == ARRAY_REF) 7934 { 7935 while (TREE_CODE (base) == ARRAY_REF) 7936 base = TREE_OPERAND (base, 0); 7937 if (TREE_CODE (base) != COMPONENT_REF 7938 || (TREE_CODE (TREE_TYPE (base)) 7939 != ARRAY_TYPE)) 7940 break; 7941 } 7942 else if (TREE_CODE (base) == INDIRECT_REF 7943 && (TREE_CODE (TREE_OPERAND (base, 0)) 7944 == COMPONENT_REF) 7945 && (TREE_CODE (TREE_TYPE 7946 (TREE_OPERAND (base, 0))) 7947 == REFERENCE_TYPE)) 7948 base = TREE_OPERAND (base, 0); 7949 base = get_inner_reference (base, &bitsize2, 7950 &bitpos2, &offset2, 7951 &mode, &unsignedp, 7952 &reversep, &volatilep); 7953 if ((TREE_CODE (base) == INDIRECT_REF 7954 || (TREE_CODE (base) == MEM_REF 7955 && integer_zerop (TREE_OPERAND (base, 7956 1)))) 7957 && DECL_P (TREE_OPERAND (base, 0)) 7958 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 7959 0))) 7960 == REFERENCE_TYPE)) 7961 base = TREE_OPERAND (base, 0); 7962 if (base != decl) 7963 break; 7964 if (scp) 7965 continue; 7966 gcc_assert (offset == NULL_TREE 7967 || TREE_CODE (offset) == INTEGER_CST); 7968 tree d1 = 
OMP_CLAUSE_DECL (*sc); 7969 tree d2 = OMP_CLAUSE_DECL (c); 7970 while (TREE_CODE (d1) == ARRAY_REF) 7971 d1 = TREE_OPERAND (d1, 0); 7972 while (TREE_CODE (d2) == ARRAY_REF) 7973 d2 = TREE_OPERAND (d2, 0); 7974 if (TREE_CODE (d1) == INDIRECT_REF) 7975 d1 = TREE_OPERAND (d1, 0); 7976 if (TREE_CODE (d2) == INDIRECT_REF) 7977 d2 = TREE_OPERAND (d2, 0); 7978 while (TREE_CODE (d1) == COMPONENT_REF) 7979 if (TREE_CODE (d2) == COMPONENT_REF 7980 && TREE_OPERAND (d1, 1) 7981 == TREE_OPERAND (d2, 1)) 7982 { 7983 d1 = TREE_OPERAND (d1, 0); 7984 d2 = TREE_OPERAND (d2, 0); 7985 } 7986 else 7987 break; 7988 if (d1 == d2) 7989 { 7990 error_at (OMP_CLAUSE_LOCATION (c), 7991 "%qE appears more than once in map " 7992 "clauses", OMP_CLAUSE_DECL (c)); 7993 remove = true; 7994 break; 7995 } 7996 if (offset2) 7997 o2 = wi::to_offset (offset2); 7998 else 7999 o2 = 0; 8000 if (bitpos2) 8001 o2 = o2 + bitpos2 / BITS_PER_UNIT; 8002 if (wi::ltu_p (o1, o2) 8003 || (wi::eq_p (o1, o2) && bitpos < bitpos2)) 8004 { 8005 if (ptr) 8006 scp = sc; 8007 else 8008 break; 8009 } 8010 } 8011 if (remove) 8012 break; 8013 OMP_CLAUSE_SIZE (*osc) 8014 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc), 8015 size_one_node); 8016 if (ptr) 8017 { 8018 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c), 8019 OMP_CLAUSE_MAP); 8020 tree cl = NULL_TREE; 8021 enum gomp_map_kind mkind 8022 = code == OMP_TARGET_EXIT_DATA 8023 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC; 8024 OMP_CLAUSE_SET_MAP_KIND (c2, mkind); 8025 OMP_CLAUSE_DECL (c2) 8026 = unshare_expr (OMP_CLAUSE_DECL (c)); 8027 OMP_CLAUSE_CHAIN (c2) = scp ? *scp : *prev_list_p; 8028 OMP_CLAUSE_SIZE (c2) 8029 = TYPE_SIZE_UNIT (ptr_type_node); 8030 cl = scp ? *prev_list_p : c2; 8031 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c) 8032 { 8033 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p); 8034 tree c3 8035 = build_omp_clause (OMP_CLAUSE_LOCATION (c), 8036 OMP_CLAUSE_MAP); 8037 OMP_CLAUSE_SET_MAP_KIND (c3, mkind); 8038 OMP_CLAUSE_DECL (c3) 8039 = unshare_expr (OMP_CLAUSE_DECL (c4)); 8040 OMP_CLAUSE_SIZE (c3) 8041 = TYPE_SIZE_UNIT (ptr_type_node); 8042 OMP_CLAUSE_CHAIN (c3) = *prev_list_p; 8043 if (!scp) 8044 OMP_CLAUSE_CHAIN (c2) = c3; 8045 else 8046 cl = c3; 8047 } 8048 if (scp) 8049 *scp = c2; 8050 if (sc == prev_list_p) 8051 { 8052 *sc = cl; 8053 prev_list_p = NULL; 8054 } 8055 else 8056 { 8057 *prev_list_p = OMP_CLAUSE_CHAIN (c); 8058 list_p = prev_list_p; 8059 prev_list_p = NULL; 8060 OMP_CLAUSE_CHAIN (c) = *sc; 8061 *sc = cl; 8062 continue; 8063 } 8064 } 8065 else if (*sc != c) 8066 { 8067 *list_p = OMP_CLAUSE_CHAIN (c); 8068 OMP_CLAUSE_CHAIN (c) = *sc; 8069 *sc = c; 8070 continue; 8071 } 8072 } 8073 } 8074 if (!remove 8075 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER 8076 && OMP_CLAUSE_CHAIN (c) 8077 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP 8078 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c)) 8079 == GOMP_MAP_ALWAYS_POINTER)) 8080 prev_list_p = list_p; 8081 break; 8082 } 8083 flags = GOVD_MAP | GOVD_EXPLICIT; 8084 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO 8085 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM) 8086 flags |= GOVD_MAP_ALWAYS_TO; 8087 goto do_add; 8088 8089 case OMP_CLAUSE_DEPEND: 8090 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK) 8091 { 8092 tree deps = OMP_CLAUSE_DECL (c); 8093 while (deps && TREE_CODE (deps) == TREE_LIST) 8094 { 8095 if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR 8096 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1))) 8097 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1), 8098 pre_p, NULL, is_gimple_val, 
fb_rvalue); 8099 deps = TREE_CHAIN (deps); 8100 } 8101 break; 8102 } 8103 else if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE) 8104 break; 8105 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR) 8106 { 8107 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p, 8108 NULL, is_gimple_val, fb_rvalue); 8109 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1); 8110 } 8111 if (error_operand_p (OMP_CLAUSE_DECL (c))) 8112 { 8113 remove = true; 8114 break; 8115 } 8116 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c)); 8117 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL, 8118 is_gimple_val, fb_rvalue) == GS_ERROR) 8119 { 8120 remove = true; 8121 break; 8122 } 8123 break; 8124 8125 case OMP_CLAUSE_TO: 8126 case OMP_CLAUSE_FROM: 8127 case OMP_CLAUSE__CACHE_: 8128 decl = OMP_CLAUSE_DECL (c); 8129 if (error_operand_p (decl)) 8130 { 8131 remove = true; 8132 break; 8133 } 8134 if (OMP_CLAUSE_SIZE (c) == NULL_TREE) 8135 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl) 8136 : TYPE_SIZE_UNIT (TREE_TYPE (decl)); 8137 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p, 8138 NULL, is_gimple_val, fb_rvalue) == GS_ERROR) 8139 { 8140 remove = true; 8141 break; 8142 } 8143 if (!DECL_P (decl)) 8144 { 8145 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, 8146 NULL, is_gimple_lvalue, fb_lvalue) 8147 == GS_ERROR) 8148 { 8149 remove = true; 8150 break; 8151 } 8152 break; 8153 } 8154 goto do_notice; 8155 8156 case OMP_CLAUSE_USE_DEVICE_PTR: 8157 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT; 8158 goto do_add; 8159 case OMP_CLAUSE_IS_DEVICE_PTR: 8160 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT; 8161 goto do_add; 8162 8163 do_add: 8164 decl = OMP_CLAUSE_DECL (c); 8165 do_add_decl: 8166 if (error_operand_p (decl)) 8167 { 8168 remove = true; 8169 break; 8170 } 8171 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0) 8172 { 8173 tree t = omp_member_access_dummy_var (decl); 8174 if (t) 8175 { 8176 tree v = DECL_VALUE_EXPR (decl); 8177 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1)); 8178 if (outer_ctx) 8179 omp_notice_variable (outer_ctx, t, true); 8180 } 8181 } 8182 if (code == OACC_DATA 8183 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP 8184 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER) 8185 flags |= GOVD_MAP_0LEN_ARRAY; 8186 omp_add_variable (ctx, decl, flags); 8187 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION 8188 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)) 8189 { 8190 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c), 8191 GOVD_LOCAL | GOVD_SEEN); 8192 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) 8193 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c), 8194 find_decl_expr, 8195 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c), 8196 NULL) == NULL_TREE) 8197 omp_add_variable (ctx, 8198 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c), 8199 GOVD_LOCAL | GOVD_SEEN); 8200 gimplify_omp_ctxp = ctx; 8201 push_gimplify_context (); 8202 8203 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL; 8204 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL; 8205 8206 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c), 8207 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)); 8208 pop_gimplify_context 8209 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))); 8210 push_gimplify_context (); 8211 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c), 8212 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)); 8213 pop_gimplify_context 8214 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c))); 8215 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE; 8216 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE; 8217 8218 
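          /* At this point OMP_CLAUSE_REDUCTION_GIMPLE_INIT and
             OMP_CLAUSE_REDUCTION_GIMPLE_MERGE hold the gimplified
             initializer and combiner of the reduction (for example,
             roughly priv = 0 and orig = orig + priv for a '+'
             reduction), with the placeholder decls registered above
             standing in for the variables involved.  */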
gimplify_omp_ctxp = outer_ctx; 8219 } 8220 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE 8221 && OMP_CLAUSE_LASTPRIVATE_STMT (c)) 8222 { 8223 gimplify_omp_ctxp = ctx; 8224 push_gimplify_context (); 8225 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR) 8226 { 8227 tree bind = build3 (BIND_EXPR, void_type_node, NULL, 8228 NULL, NULL); 8229 TREE_SIDE_EFFECTS (bind) = 1; 8230 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c); 8231 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind; 8232 } 8233 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c), 8234 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)); 8235 pop_gimplify_context 8236 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))); 8237 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE; 8238 8239 gimplify_omp_ctxp = outer_ctx; 8240 } 8241 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR 8242 && OMP_CLAUSE_LINEAR_STMT (c)) 8243 { 8244 gimplify_omp_ctxp = ctx; 8245 push_gimplify_context (); 8246 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR) 8247 { 8248 tree bind = build3 (BIND_EXPR, void_type_node, NULL, 8249 NULL, NULL); 8250 TREE_SIDE_EFFECTS (bind) = 1; 8251 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c); 8252 OMP_CLAUSE_LINEAR_STMT (c) = bind; 8253 } 8254 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c), 8255 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)); 8256 pop_gimplify_context 8257 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))); 8258 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE; 8259 8260 gimplify_omp_ctxp = outer_ctx; 8261 } 8262 if (notice_outer) 8263 goto do_notice; 8264 break; 8265 8266 case OMP_CLAUSE_COPYIN: 8267 case OMP_CLAUSE_COPYPRIVATE: 8268 decl = OMP_CLAUSE_DECL (c); 8269 if (error_operand_p (decl)) 8270 { 8271 remove = true; 8272 break; 8273 } 8274 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE 8275 && !remove 8276 && !omp_check_private (ctx, decl, true)) 8277 { 8278 remove = true; 8279 if (is_global_var (decl)) 8280 { 8281 if (DECL_THREAD_LOCAL_P (decl)) 8282 remove = false; 8283 else if (DECL_HAS_VALUE_EXPR_P (decl)) 8284 { 8285 tree value = get_base_address (DECL_VALUE_EXPR (decl)); 8286 8287 if (value 8288 && DECL_P (value) 8289 && DECL_THREAD_LOCAL_P (value)) 8290 remove = false; 8291 } 8292 } 8293 if (remove) 8294 error_at (OMP_CLAUSE_LOCATION (c), 8295 "copyprivate variable %qE is not threadprivate" 8296 " or private in outer context", DECL_NAME (decl)); 8297 } 8298 do_notice: 8299 if (outer_ctx) 8300 omp_notice_variable (outer_ctx, decl, true); 8301 if (check_non_private 8302 && region_type == ORT_WORKSHARE 8303 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION 8304 || decl == OMP_CLAUSE_DECL (c) 8305 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF 8306 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0)) 8307 == ADDR_EXPR 8308 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0)) 8309 == POINTER_PLUS_EXPR 8310 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND 8311 (OMP_CLAUSE_DECL (c), 0), 0)) 8312 == ADDR_EXPR))))) 8313 && omp_check_private (ctx, decl, false)) 8314 { 8315 error ("%s variable %qE is private in outer context", 8316 check_non_private, DECL_NAME (decl)); 8317 remove = true; 8318 } 8319 break; 8320 8321 case OMP_CLAUSE_IF: 8322 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK 8323 && OMP_CLAUSE_IF_MODIFIER (c) != code) 8324 { 8325 const char *p[2]; 8326 for (int i = 0; i < 2; i++) 8327 switch (i ? 
OMP_CLAUSE_IF_MODIFIER (c) : code) 8328 { 8329 case OMP_PARALLEL: p[i] = "parallel"; break; 8330 case OMP_TASK: p[i] = "task"; break; 8331 case OMP_TASKLOOP: p[i] = "taskloop"; break; 8332 case OMP_TARGET_DATA: p[i] = "target data"; break; 8333 case OMP_TARGET: p[i] = "target"; break; 8334 case OMP_TARGET_UPDATE: p[i] = "target update"; break; 8335 case OMP_TARGET_ENTER_DATA: 8336 p[i] = "target enter data"; break; 8337 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break; 8338 default: gcc_unreachable (); 8339 } 8340 error_at (OMP_CLAUSE_LOCATION (c), 8341 "expected %qs %<if%> clause modifier rather than %qs", 8342 p[0], p[1]); 8343 remove = true; 8344 } 8345 /* Fall through. */ 8346 8347 case OMP_CLAUSE_FINAL: 8348 OMP_CLAUSE_OPERAND (c, 0) 8349 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0)); 8350 /* Fall through. */ 8351 8352 case OMP_CLAUSE_SCHEDULE: 8353 case OMP_CLAUSE_NUM_THREADS: 8354 case OMP_CLAUSE_NUM_TEAMS: 8355 case OMP_CLAUSE_THREAD_LIMIT: 8356 case OMP_CLAUSE_DIST_SCHEDULE: 8357 case OMP_CLAUSE_DEVICE: 8358 case OMP_CLAUSE_PRIORITY: 8359 case OMP_CLAUSE_GRAINSIZE: 8360 case OMP_CLAUSE_NUM_TASKS: 8361 case OMP_CLAUSE_HINT: 8362 case OMP_CLAUSE__CILK_FOR_COUNT_: 8363 case OMP_CLAUSE_ASYNC: 8364 case OMP_CLAUSE_WAIT: 8365 case OMP_CLAUSE_NUM_GANGS: 8366 case OMP_CLAUSE_NUM_WORKERS: 8367 case OMP_CLAUSE_VECTOR_LENGTH: 8368 case OMP_CLAUSE_WORKER: 8369 case OMP_CLAUSE_VECTOR: 8370 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL, 8371 is_gimple_val, fb_rvalue) == GS_ERROR) 8372 remove = true; 8373 break; 8374 8375 case OMP_CLAUSE_GANG: 8376 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL, 8377 is_gimple_val, fb_rvalue) == GS_ERROR) 8378 remove = true; 8379 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL, 8380 is_gimple_val, fb_rvalue) == GS_ERROR) 8381 remove = true; 8382 break; 8383 8384 case OMP_CLAUSE_NOWAIT: 8385 case OMP_CLAUSE_ORDERED: 8386 case OMP_CLAUSE_UNTIED: 8387 case OMP_CLAUSE_COLLAPSE: 8388 case OMP_CLAUSE_TILE: 8389 case OMP_CLAUSE_AUTO: 8390 case OMP_CLAUSE_SEQ: 8391 case OMP_CLAUSE_INDEPENDENT: 8392 case OMP_CLAUSE_MERGEABLE: 8393 case OMP_CLAUSE_PROC_BIND: 8394 case OMP_CLAUSE_SAFELEN: 8395 case OMP_CLAUSE_SIMDLEN: 8396 case OMP_CLAUSE_NOGROUP: 8397 case OMP_CLAUSE_THREADS: 8398 case OMP_CLAUSE_SIMD: 8399 break; 8400 8401 case OMP_CLAUSE_DEFAULTMAP: 8402 ctx->target_map_scalars_firstprivate = false; 8403 break; 8404 8405 case OMP_CLAUSE_ALIGNED: 8406 decl = OMP_CLAUSE_DECL (c); 8407 if (error_operand_p (decl)) 8408 { 8409 remove = true; 8410 break; 8411 } 8412 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL, 8413 is_gimple_val, fb_rvalue) == GS_ERROR) 8414 { 8415 remove = true; 8416 break; 8417 } 8418 if (!is_global_var (decl) 8419 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE) 8420 omp_add_variable (ctx, decl, GOVD_ALIGNED); 8421 break; 8422 8423 case OMP_CLAUSE_DEFAULT: 8424 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c); 8425 break; 8426 8427 default: 8428 gcc_unreachable (); 8429 } 8430 8431 if (code == OACC_DATA 8432 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP 8433 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER) 8434 remove = true; 8435 if (remove) 8436 *list_p = OMP_CLAUSE_CHAIN (c); 8437 else 8438 list_p = &OMP_CLAUSE_CHAIN (c); 8439 } 8440 8441 gimplify_omp_ctxp = ctx; 8442 if (struct_map_to_clause) 8443 delete struct_map_to_clause; 8444 } 8445 8446 /* Return true if DECL is a candidate for shared to firstprivate 8447 optimization. 
We only consider non-addressable scalars, not 8448 too big, and not references. */ 8449 8450 static bool 8451 omp_shared_to_firstprivate_optimizable_decl_p (tree decl) 8452 { 8453 if (TREE_ADDRESSABLE (decl)) 8454 return false; 8455 tree type = TREE_TYPE (decl); 8456 if (!is_gimple_reg_type (type) 8457 || TREE_CODE (type) == REFERENCE_TYPE 8458 || TREE_ADDRESSABLE (type)) 8459 return false; 8460 /* Don't optimize too large decls, as each thread/task will have 8461 its own. */ 8462 HOST_WIDE_INT len = int_size_in_bytes (type); 8463 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT) 8464 return false; 8465 if (lang_hooks.decls.omp_privatize_by_reference (decl)) 8466 return false; 8467 return true; 8468 } 8469 8470 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*. 8471 For omp_shared_to_firstprivate_optimizable_decl_p decl mark it as 8472 GOVD_WRITTEN in outer contexts. */ 8473 8474 static void 8475 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl) 8476 { 8477 for (; ctx; ctx = ctx->outer_context) 8478 { 8479 splay_tree_node n = splay_tree_lookup (ctx->variables, 8480 (splay_tree_key) decl); 8481 if (n == NULL) 8482 continue; 8483 else if (n->value & GOVD_SHARED) 8484 { 8485 n->value |= GOVD_WRITTEN; 8486 return; 8487 } 8488 else if (n->value & GOVD_DATA_SHARE_CLASS) 8489 return; 8490 } 8491 } 8492 8493 /* Helper callback for walk_gimple_seq to discover possible stores 8494 to omp_shared_to_firstprivate_optimizable_decl_p decls and set 8495 GOVD_WRITTEN if they are GOVD_SHARED in some outer context 8496 for those. */ 8497 8498 static tree 8499 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data) 8500 { 8501 struct walk_stmt_info *wi = (struct walk_stmt_info *) data; 8502 8503 *walk_subtrees = 0; 8504 if (!wi->is_lhs) 8505 return NULL_TREE; 8506 8507 tree op = *tp; 8508 do 8509 { 8510 if (handled_component_p (op)) 8511 op = TREE_OPERAND (op, 0); 8512 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF) 8513 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR) 8514 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0); 8515 else 8516 break; 8517 } 8518 while (1); 8519 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op)) 8520 return NULL_TREE; 8521 8522 omp_mark_stores (gimplify_omp_ctxp, op); 8523 return NULL_TREE; 8524 } 8525 8526 /* Helper callback for walk_gimple_seq to discover possible stores 8527 to omp_shared_to_firstprivate_optimizable_decl_p decls and set 8528 GOVD_WRITTEN if they are GOVD_SHARED in some outer context 8529 for those. */ 8530 8531 static tree 8532 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p, 8533 bool *handled_ops_p, 8534 struct walk_stmt_info *wi) 8535 { 8536 gimple *stmt = gsi_stmt (*gsi_p); 8537 switch (gimple_code (stmt)) 8538 { 8539 /* Don't recurse on OpenMP constructs for which 8540 gimplify_adjust_omp_clauses already handled the bodies, 8541 except handle gimple_omp_for_pre_body. 
*/ 8542 case GIMPLE_OMP_FOR: 8543 *handled_ops_p = true; 8544 if (gimple_omp_for_pre_body (stmt)) 8545 walk_gimple_seq (gimple_omp_for_pre_body (stmt), 8546 omp_find_stores_stmt, omp_find_stores_op, wi); 8547 break; 8548 case GIMPLE_OMP_PARALLEL: 8549 case GIMPLE_OMP_TASK: 8550 case GIMPLE_OMP_SECTIONS: 8551 case GIMPLE_OMP_SINGLE: 8552 case GIMPLE_OMP_TARGET: 8553 case GIMPLE_OMP_TEAMS: 8554 case GIMPLE_OMP_CRITICAL: 8555 *handled_ops_p = true; 8556 break; 8557 default: 8558 break; 8559 } 8560 return NULL_TREE; 8561 } 8562 8563 struct gimplify_adjust_omp_clauses_data 8564 { 8565 tree *list_p; 8566 gimple_seq *pre_p; 8567 }; 8568 8569 /* For all variables that were not actually used within the context, 8570 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */ 8571 8572 static int 8573 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data) 8574 { 8575 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p; 8576 gimple_seq *pre_p 8577 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p; 8578 tree decl = (tree) n->key; 8579 unsigned flags = n->value; 8580 enum omp_clause_code code; 8581 tree clause; 8582 bool private_debug; 8583 8584 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL)) 8585 return 0; 8586 if ((flags & GOVD_SEEN) == 0) 8587 return 0; 8588 if (flags & GOVD_DEBUG_PRIVATE) 8589 { 8590 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED); 8591 private_debug = true; 8592 } 8593 else if (flags & GOVD_MAP) 8594 private_debug = false; 8595 else 8596 private_debug 8597 = lang_hooks.decls.omp_private_debug_clause (decl, 8598 !!(flags & GOVD_SHARED)); 8599 if (private_debug) 8600 code = OMP_CLAUSE_PRIVATE; 8601 else if (flags & GOVD_MAP) 8602 { 8603 code = OMP_CLAUSE_MAP; 8604 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0 8605 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl)))) 8606 { 8607 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl); 8608 return 0; 8609 } 8610 } 8611 else if (flags & GOVD_SHARED) 8612 { 8613 if (is_global_var (decl)) 8614 { 8615 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context; 8616 while (ctx != NULL) 8617 { 8618 splay_tree_node on 8619 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl); 8620 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE 8621 | GOVD_PRIVATE | GOVD_REDUCTION 8622 | GOVD_LINEAR | GOVD_MAP)) != 0) 8623 break; 8624 ctx = ctx->outer_context; 8625 } 8626 if (ctx == NULL) 8627 return 0; 8628 } 8629 code = OMP_CLAUSE_SHARED; 8630 } 8631 else if (flags & GOVD_PRIVATE) 8632 code = OMP_CLAUSE_PRIVATE; 8633 else if (flags & GOVD_FIRSTPRIVATE) 8634 { 8635 code = OMP_CLAUSE_FIRSTPRIVATE; 8636 if ((gimplify_omp_ctxp->region_type & ORT_TARGET) 8637 && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0 8638 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl)))) 8639 { 8640 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on " 8641 "%<target%> construct", decl); 8642 return 0; 8643 } 8644 } 8645 else if (flags & GOVD_LASTPRIVATE) 8646 code = OMP_CLAUSE_LASTPRIVATE; 8647 else if (flags & GOVD_ALIGNED) 8648 return 0; 8649 else 8650 gcc_unreachable (); 8651 8652 if (((flags & GOVD_LASTPRIVATE) 8653 || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN))) 8654 && omp_shared_to_firstprivate_optimizable_decl_p (decl)) 8655 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl); 8656 8657 tree chain = *list_p; 8658 clause = build_omp_clause (input_location, code); 8659 OMP_CLAUSE_DECL (clause) = decl; 8660 OMP_CLAUSE_CHAIN (clause) = chain; 8661 if (private_debug) 8662 
OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1; 8663 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF)) 8664 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1; 8665 else if (code == OMP_CLAUSE_SHARED 8666 && (flags & GOVD_WRITTEN) == 0 8667 && omp_shared_to_firstprivate_optimizable_decl_p (decl)) 8668 OMP_CLAUSE_SHARED_READONLY (clause) = 1; 8669 else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0) 8670 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1; 8671 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0) 8672 { 8673 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP); 8674 OMP_CLAUSE_DECL (nc) = decl; 8675 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE 8676 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE) 8677 OMP_CLAUSE_DECL (clause) 8678 = build_simple_mem_ref_loc (input_location, decl); 8679 OMP_CLAUSE_DECL (clause) 8680 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause), 8681 build_int_cst (build_pointer_type (char_type_node), 0)); 8682 OMP_CLAUSE_SIZE (clause) = size_zero_node; 8683 OMP_CLAUSE_SIZE (nc) = size_zero_node; 8684 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC); 8685 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1; 8686 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER); 8687 OMP_CLAUSE_CHAIN (nc) = chain; 8688 OMP_CLAUSE_CHAIN (clause) = nc; 8689 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; 8690 gimplify_omp_ctxp = ctx->outer_context; 8691 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0), 8692 pre_p, NULL, is_gimple_val, fb_rvalue); 8693 gimplify_omp_ctxp = ctx; 8694 } 8695 else if (code == OMP_CLAUSE_MAP) 8696 { 8697 int kind = (flags & GOVD_MAP_TO_ONLY 8698 ? GOMP_MAP_TO 8699 : GOMP_MAP_TOFROM); 8700 if (flags & GOVD_MAP_FORCE) 8701 kind |= GOMP_MAP_FLAG_FORCE; 8702 OMP_CLAUSE_SET_MAP_KIND (clause, kind); 8703 if (DECL_SIZE (decl) 8704 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST) 8705 { 8706 tree decl2 = DECL_VALUE_EXPR (decl); 8707 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF); 8708 decl2 = TREE_OPERAND (decl2, 0); 8709 gcc_assert (DECL_P (decl2)); 8710 tree mem = build_simple_mem_ref (decl2); 8711 OMP_CLAUSE_DECL (clause) = mem; 8712 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl)); 8713 if (gimplify_omp_ctxp->outer_context) 8714 { 8715 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context; 8716 omp_notice_variable (ctx, decl2, true); 8717 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true); 8718 } 8719 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause), 8720 OMP_CLAUSE_MAP); 8721 OMP_CLAUSE_DECL (nc) = decl; 8722 OMP_CLAUSE_SIZE (nc) = size_zero_node; 8723 if (gimplify_omp_ctxp->target_firstprivatize_array_bases) 8724 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER); 8725 else 8726 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER); 8727 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause); 8728 OMP_CLAUSE_CHAIN (clause) = nc; 8729 } 8730 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases 8731 && lang_hooks.decls.omp_privatize_by_reference (decl)) 8732 { 8733 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl); 8734 OMP_CLAUSE_SIZE (clause) 8735 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))); 8736 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; 8737 gimplify_omp_ctxp = ctx->outer_context; 8738 gimplify_expr (&OMP_CLAUSE_SIZE (clause), 8739 pre_p, NULL, is_gimple_val, fb_rvalue); 8740 gimplify_omp_ctxp = ctx; 8741 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause), 8742 
                                  OMP_CLAUSE_MAP);
          OMP_CLAUSE_DECL (nc) = decl;
          OMP_CLAUSE_SIZE (nc) = size_zero_node;
          OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
          OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
          OMP_CLAUSE_CHAIN (clause) = nc;
        }
      else
        OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
    }
  if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
    {
      tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
      OMP_CLAUSE_DECL (nc) = decl;
      OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
      OMP_CLAUSE_CHAIN (nc) = chain;
      OMP_CLAUSE_CHAIN (clause) = nc;
      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
      gimplify_omp_ctxp = ctx->outer_context;
      lang_hooks.decls.omp_finish_clause (nc, pre_p);
      gimplify_omp_ctxp = ctx;
    }
  *list_p = clause;
  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
  gimplify_omp_ctxp = ctx->outer_context;
  lang_hooks.decls.omp_finish_clause (clause, pre_p);
  if (gimplify_omp_ctxp)
    for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
      if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
          && DECL_P (OMP_CLAUSE_SIZE (clause)))
        omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
                             true);
  gimplify_omp_ctxp = ctx;
  return 0;
}

static void
gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
                             enum tree_code code)
{
  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
  tree c, decl;

  if (body)
    {
      struct gimplify_omp_ctx *octx;
      for (octx = ctx; octx; octx = octx->outer_context)
        if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
          break;
      if (octx)
        {
          struct walk_stmt_info wi;
          memset (&wi, 0, sizeof (wi));
          walk_gimple_seq (body, omp_find_stores_stmt,
                           omp_find_stores_op, &wi);
        }
    }
  while ((c = *list_p) != NULL)
    {
      splay_tree_node n;
      bool remove = false;

      switch (OMP_CLAUSE_CODE (c))
        {
        case OMP_CLAUSE_FIRSTPRIVATE:
          if ((ctx->region_type & ORT_TARGET)
              && (ctx->region_type & ORT_ACC) == 0
              && TYPE_ATOMIC (strip_array_types
                              (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
            {
              error_at (OMP_CLAUSE_LOCATION (c),
                        "%<_Atomic%> %qD in %<firstprivate%> clause on "
                        "%<target%> construct", OMP_CLAUSE_DECL (c));
              remove = true;
              break;
            }
          /* FALLTHRU */
        case OMP_CLAUSE_PRIVATE:
        case OMP_CLAUSE_SHARED:
        case OMP_CLAUSE_LINEAR:
          decl = OMP_CLAUSE_DECL (c);
          n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
          remove = !(n->value & GOVD_SEEN);
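          /* An explicit data-sharing clause naming a variable that was
             never actually referenced in the region body -- e.g.
             private (x) with X unused -- is simply dropped.  */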
          if (!remove)
            {
              bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
              if ((n->value & GOVD_DEBUG_PRIVATE)
                  || lang_hooks.decls.omp_private_debug_clause (decl, shared))
                {
                  gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
                              || ((n->value & GOVD_DATA_SHARE_CLASS)
                                  == GOVD_SHARED));
                  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
                  OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
                }
              if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
                  && (n->value & GOVD_WRITTEN) == 0
                  && DECL_P (decl)
                  && omp_shared_to_firstprivate_optimizable_decl_p (decl))
                OMP_CLAUSE_SHARED_READONLY (c) = 1;
              else if (DECL_P (decl)
                       && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
                            && (n->value & GOVD_WRITTEN) != 0)
                           || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
                               && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
                       && omp_shared_to_firstprivate_optimizable_decl_p (decl))
                omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
            }
          break;

        case OMP_CLAUSE_LASTPRIVATE:
          /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
             accurately reflect the presence of a FIRSTPRIVATE clause.  */
          decl = OMP_CLAUSE_DECL (c);
          n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
          OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
            = (n->value & GOVD_FIRSTPRIVATE) != 0;
          if (code == OMP_DISTRIBUTE
              && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
            {
              remove = true;
              error_at (OMP_CLAUSE_LOCATION (c),
                        "same variable used in %<firstprivate%> and "
                        "%<lastprivate%> clauses on %<distribute%> "
                        "construct");
            }
          if (!remove
              && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
              && DECL_P (decl)
              && omp_shared_to_firstprivate_optimizable_decl_p (decl))
            omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
          break;

        case OMP_CLAUSE_ALIGNED:
          decl = OMP_CLAUSE_DECL (c);
          if (!is_global_var (decl))
            {
              n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
              remove = n == NULL || !(n->value & GOVD_SEEN);
              if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
                {
                  struct gimplify_omp_ctx *octx;
                  if (n != NULL
                      && (n->value & (GOVD_DATA_SHARE_CLASS
                                      & ~GOVD_FIRSTPRIVATE)))
                    remove = true;
                  else
                    for (octx = ctx->outer_context; octx;
                         octx = octx->outer_context)
                      {
                        n = splay_tree_lookup (octx->variables,
                                               (splay_tree_key) decl);
                        if (n == NULL)
                          continue;
                        if (n->value & GOVD_LOCAL)
                          break;
                        /* We have to avoid assigning a shared variable
                           to itself when trying to add
                           __builtin_assume_aligned.  */
                        if (n->value & GOVD_SHARED)
                          {
                            remove = true;
                            break;
                          }
                      }
                }
            }
          else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
            {
              n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
              if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
                remove = true;
            }
          break;

        case OMP_CLAUSE_MAP:
          if (code == OMP_TARGET_EXIT_DATA
              && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
            {
              remove = true;
              break;
            }
          decl = OMP_CLAUSE_DECL (c);
          /* Data clauses associated with acc parallel reductions must be
             compatible with present_or_copy.  Warn and adjust the clause
             if that is not the case.  */
          if (ctx->region_type == ORT_ACC_PARALLEL)
            {
              tree t = DECL_P (decl) ?
decl : TREE_OPERAND (decl, 0); 8931 n = NULL; 8932 8933 if (DECL_P (t)) 8934 n = splay_tree_lookup (ctx->variables, (splay_tree_key) t); 8935 8936 if (n && (n->value & GOVD_REDUCTION)) 8937 { 8938 enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c); 8939 8940 OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1; 8941 if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM 8942 && kind != GOMP_MAP_FORCE_PRESENT 8943 && kind != GOMP_MAP_POINTER) 8944 { 8945 warning_at (OMP_CLAUSE_LOCATION (c), 0, 8946 "incompatible data clause with reduction " 8947 "on %qE; promoting to present_or_copy", 8948 DECL_NAME (t)); 8949 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM); 8950 } 8951 } 8952 } 8953 if (!DECL_P (decl)) 8954 { 8955 if ((ctx->region_type & ORT_TARGET) != 0 8956 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER) 8957 { 8958 if (TREE_CODE (decl) == INDIRECT_REF 8959 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF 8960 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0))) 8961 == REFERENCE_TYPE)) 8962 decl = TREE_OPERAND (decl, 0); 8963 if (TREE_CODE (decl) == COMPONENT_REF) 8964 { 8965 while (TREE_CODE (decl) == COMPONENT_REF) 8966 decl = TREE_OPERAND (decl, 0); 8967 if (DECL_P (decl)) 8968 { 8969 n = splay_tree_lookup (ctx->variables, 8970 (splay_tree_key) decl); 8971 if (!(n->value & GOVD_SEEN)) 8972 remove = true; 8973 } 8974 } 8975 } 8976 break; 8977 } 8978 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl); 8979 if ((ctx->region_type & ORT_TARGET) != 0 8980 && !(n->value & GOVD_SEEN) 8981 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0 8982 && (!is_global_var (decl) 8983 || !lookup_attribute ("omp declare target link", 8984 DECL_ATTRIBUTES (decl)))) 8985 { 8986 remove = true; 8987 /* For struct element mapping, if struct is never referenced 8988 in target block and none of the mapping has always modifier, 8989 remove all the struct element mappings, which immediately 8990 follow the GOMP_MAP_STRUCT map clause. */ 8991 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT) 8992 { 8993 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c)); 8994 while (cnt--) 8995 OMP_CLAUSE_CHAIN (c) 8996 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c)); 8997 } 8998 } 8999 else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT 9000 && code == OMP_TARGET_EXIT_DATA) 9001 remove = true; 9002 else if (DECL_SIZE (decl) 9003 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST 9004 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER 9005 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER 9006 && (OMP_CLAUSE_MAP_KIND (c) 9007 != GOMP_MAP_FIRSTPRIVATE_REFERENCE)) 9008 { 9009 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because 9010 for these, TREE_CODE (DECL_SIZE (decl)) will always be 9011 INTEGER_CST. 
             */
              gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);

              tree decl2 = DECL_VALUE_EXPR (decl);
              gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
              decl2 = TREE_OPERAND (decl2, 0);
              gcc_assert (DECL_P (decl2));
              tree mem = build_simple_mem_ref (decl2);
              OMP_CLAUSE_DECL (c) = mem;
              OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
              if (ctx->outer_context)
                {
                  omp_notice_variable (ctx->outer_context, decl2, true);
                  omp_notice_variable (ctx->outer_context,
                                       OMP_CLAUSE_SIZE (c), true);
                }
              if (((ctx->region_type & ORT_TARGET) != 0
                   || !ctx->target_firstprivatize_array_bases)
                  && ((n->value & GOVD_SEEN) == 0
                      || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
                {
                  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
                                              OMP_CLAUSE_MAP);
                  OMP_CLAUSE_DECL (nc) = decl;
                  OMP_CLAUSE_SIZE (nc) = size_zero_node;
                  if (ctx->target_firstprivatize_array_bases)
                    OMP_CLAUSE_SET_MAP_KIND (nc,
                                             GOMP_MAP_FIRSTPRIVATE_POINTER);
                  else
                    OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
                  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
                  OMP_CLAUSE_CHAIN (c) = nc;
                  c = nc;
                }
            }
          else
            {
              if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
                OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
              gcc_assert ((n->value & GOVD_SEEN) == 0
                          || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
                              == 0));
            }
          break;

        case OMP_CLAUSE_TO:
        case OMP_CLAUSE_FROM:
        case OMP_CLAUSE__CACHE_:
          decl = OMP_CLAUSE_DECL (c);
          if (!DECL_P (decl))
            break;
          if (DECL_SIZE (decl)
              && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
            {
              tree decl2 = DECL_VALUE_EXPR (decl);
              gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
              decl2 = TREE_OPERAND (decl2, 0);
              gcc_assert (DECL_P (decl2));
              tree mem = build_simple_mem_ref (decl2);
              OMP_CLAUSE_DECL (c) = mem;
              OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
              if (ctx->outer_context)
                {
                  omp_notice_variable (ctx->outer_context, decl2, true);
                  omp_notice_variable (ctx->outer_context,
                                       OMP_CLAUSE_SIZE (c), true);
                }
            }
          else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
            OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
          break;

        case OMP_CLAUSE_REDUCTION:
          decl = OMP_CLAUSE_DECL (c);
          /* OpenACC reductions need a present_or_copy data clause.
             Add one if necessary.  Error if the reduction is private.  */
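          /* For instance, '#pragma acc parallel reduction (+:sum)' with no
             explicit data clause for SUM gets an implicit map (tofrom:sum)
             built for it below.  */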
*/ 9087 if (ctx->region_type == ORT_ACC_PARALLEL) 9088 { 9089 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl); 9090 if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) 9091 error_at (OMP_CLAUSE_LOCATION (c), "invalid private " 9092 "reduction on %qE", DECL_NAME (decl)); 9093 else if ((n->value & GOVD_MAP) == 0) 9094 { 9095 tree next = OMP_CLAUSE_CHAIN (c); 9096 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP); 9097 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM); 9098 OMP_CLAUSE_DECL (nc) = decl; 9099 OMP_CLAUSE_CHAIN (c) = nc; 9100 lang_hooks.decls.omp_finish_clause (nc, pre_p); 9101 while (1) 9102 { 9103 OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1; 9104 if (OMP_CLAUSE_CHAIN (nc) == NULL) 9105 break; 9106 nc = OMP_CLAUSE_CHAIN (nc); 9107 } 9108 OMP_CLAUSE_CHAIN (nc) = next; 9109 n->value |= GOVD_MAP; 9110 } 9111 } 9112 if (DECL_P (decl) 9113 && omp_shared_to_firstprivate_optimizable_decl_p (decl)) 9114 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl); 9115 break; 9116 case OMP_CLAUSE_COPYIN: 9117 case OMP_CLAUSE_COPYPRIVATE: 9118 case OMP_CLAUSE_IF: 9119 case OMP_CLAUSE_NUM_THREADS: 9120 case OMP_CLAUSE_NUM_TEAMS: 9121 case OMP_CLAUSE_THREAD_LIMIT: 9122 case OMP_CLAUSE_DIST_SCHEDULE: 9123 case OMP_CLAUSE_DEVICE: 9124 case OMP_CLAUSE_SCHEDULE: 9125 case OMP_CLAUSE_NOWAIT: 9126 case OMP_CLAUSE_ORDERED: 9127 case OMP_CLAUSE_DEFAULT: 9128 case OMP_CLAUSE_UNTIED: 9129 case OMP_CLAUSE_COLLAPSE: 9130 case OMP_CLAUSE_FINAL: 9131 case OMP_CLAUSE_MERGEABLE: 9132 case OMP_CLAUSE_PROC_BIND: 9133 case OMP_CLAUSE_SAFELEN: 9134 case OMP_CLAUSE_SIMDLEN: 9135 case OMP_CLAUSE_DEPEND: 9136 case OMP_CLAUSE_PRIORITY: 9137 case OMP_CLAUSE_GRAINSIZE: 9138 case OMP_CLAUSE_NUM_TASKS: 9139 case OMP_CLAUSE_NOGROUP: 9140 case OMP_CLAUSE_THREADS: 9141 case OMP_CLAUSE_SIMD: 9142 case OMP_CLAUSE_HINT: 9143 case OMP_CLAUSE_DEFAULTMAP: 9144 case OMP_CLAUSE_USE_DEVICE_PTR: 9145 case OMP_CLAUSE_IS_DEVICE_PTR: 9146 case OMP_CLAUSE__CILK_FOR_COUNT_: 9147 case OMP_CLAUSE_ASYNC: 9148 case OMP_CLAUSE_WAIT: 9149 case OMP_CLAUSE_INDEPENDENT: 9150 case OMP_CLAUSE_NUM_GANGS: 9151 case OMP_CLAUSE_NUM_WORKERS: 9152 case OMP_CLAUSE_VECTOR_LENGTH: 9153 case OMP_CLAUSE_GANG: 9154 case OMP_CLAUSE_WORKER: 9155 case OMP_CLAUSE_VECTOR: 9156 case OMP_CLAUSE_AUTO: 9157 case OMP_CLAUSE_SEQ: 9158 case OMP_CLAUSE_TILE: 9159 break; 9160 9161 default: 9162 gcc_unreachable (); 9163 } 9164 9165 if (remove) 9166 *list_p = OMP_CLAUSE_CHAIN (c); 9167 else 9168 list_p = &OMP_CLAUSE_CHAIN (c); 9169 } 9170 9171 /* Add in any implicit data sharing. */ 9172 struct gimplify_adjust_omp_clauses_data data; 9173 data.list_p = list_p; 9174 data.pre_p = pre_p; 9175 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data); 9176 9177 gimplify_omp_ctxp = ctx->outer_context; 9178 delete_omp_context (ctx); 9179 } 9180 9181 /* Gimplify OACC_CACHE. */ 9182 9183 static void 9184 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p) 9185 { 9186 tree expr = *expr_p; 9187 9188 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC, 9189 OACC_CACHE); 9190 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr), 9191 OACC_CACHE); 9192 9193 /* TODO: Do something sensible with this information. */ 9194 9195 *expr_p = NULL_TREE; 9196 } 9197 9198 /* Helper function of gimplify_oacc_declare. The helper's purpose is to, 9199 if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit' 9200 kind. 
The entry kind will replace the one in CLAUSE, while the exit 9201 kind will be used in a new omp_clause and returned to the caller. */ 9202 9203 static tree 9204 gimplify_oacc_declare_1 (tree clause) 9205 { 9206 HOST_WIDE_INT kind, new_op; 9207 bool ret = false; 9208 tree c = NULL; 9209 9210 kind = OMP_CLAUSE_MAP_KIND (clause); 9211 9212 switch (kind) 9213 { 9214 case GOMP_MAP_ALLOC: 9215 case GOMP_MAP_FORCE_ALLOC: 9216 case GOMP_MAP_FORCE_TO: 9217 new_op = GOMP_MAP_DELETE; 9218 ret = true; 9219 break; 9220 9221 case GOMP_MAP_FORCE_FROM: 9222 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC); 9223 new_op = GOMP_MAP_FORCE_FROM; 9224 ret = true; 9225 break; 9226 9227 case GOMP_MAP_FORCE_TOFROM: 9228 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_TO); 9229 new_op = GOMP_MAP_FORCE_FROM; 9230 ret = true; 9231 break; 9232 9233 case GOMP_MAP_FROM: 9234 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC); 9235 new_op = GOMP_MAP_FROM; 9236 ret = true; 9237 break; 9238 9239 case GOMP_MAP_TOFROM: 9240 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO); 9241 new_op = GOMP_MAP_FROM; 9242 ret = true; 9243 break; 9244 9245 case GOMP_MAP_DEVICE_RESIDENT: 9246 case GOMP_MAP_FORCE_DEVICEPTR: 9247 case GOMP_MAP_FORCE_PRESENT: 9248 case GOMP_MAP_LINK: 9249 case GOMP_MAP_POINTER: 9250 case GOMP_MAP_TO: 9251 break; 9252 9253 default: 9254 gcc_unreachable (); 9255 break; 9256 } 9257 9258 if (ret) 9259 { 9260 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP); 9261 OMP_CLAUSE_SET_MAP_KIND (c, new_op); 9262 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause); 9263 } 9264 9265 return c; 9266 } 9267 9268 /* Gimplify OACC_DECLARE. */ 9269 9270 static void 9271 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p) 9272 { 9273 tree expr = *expr_p; 9274 gomp_target *stmt; 9275 tree clauses, t, decl; 9276 9277 clauses = OACC_DECLARE_CLAUSES (expr); 9278 9279 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE); 9280 gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE); 9281 9282 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t)) 9283 { 9284 decl = OMP_CLAUSE_DECL (t); 9285 9286 if (TREE_CODE (decl) == MEM_REF) 9287 decl = TREE_OPERAND (decl, 0); 9288 9289 if (VAR_P (decl) && !is_oacc_declared (decl)) 9290 { 9291 tree attr = get_identifier ("oacc declare target"); 9292 DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE, 9293 DECL_ATTRIBUTES (decl)); 9294 } 9295 9296 if (VAR_P (decl) 9297 && !is_global_var (decl) 9298 && DECL_CONTEXT (decl) == current_function_decl) 9299 { 9300 tree c = gimplify_oacc_declare_1 (t); 9301 if (c) 9302 { 9303 if (oacc_declare_returns == NULL) 9304 oacc_declare_returns = new hash_map<tree, tree>; 9305 9306 oacc_declare_returns->put (decl, c); 9307 } 9308 } 9309 9310 if (gimplify_omp_ctxp) 9311 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN); 9312 } 9313 9314 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE, 9315 clauses); 9316 9317 gimplify_seq_add_stmt (pre_p, stmt); 9318 9319 *expr_p = NULL_TREE; 9320 } 9321 9322 /* Gimplify the contents of an OMP_PARALLEL statement. This involves 9323 gimplification of the body, as well as scanning the body for used 9324 variables. We need to do this scan now, because variable-sized 9325 decls will be decomposed during gimplification. 
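As a rough sketch (not the exact IR), '#pragma omp parallel' over BODY is rewritten into a GIMPLE_OMP_PARALLEL tuple wrapping the gimplified BODY, and gimplify_adjust_omp_clauses then extends the clause list so that data sharing determined implicitly during the scan also appears as explicit clauses on the tuple.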
*/ 9326 9327 static void 9328 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p) 9329 { 9330 tree expr = *expr_p; 9331 gimple *g; 9332 gimple_seq body = NULL; 9333 9334 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p, 9335 OMP_PARALLEL_COMBINED (expr) 9336 ? ORT_COMBINED_PARALLEL 9337 : ORT_PARALLEL, OMP_PARALLEL); 9338 9339 push_gimplify_context (); 9340 9341 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body); 9342 if (gimple_code (g) == GIMPLE_BIND) 9343 pop_gimplify_context (g); 9344 else 9345 pop_gimplify_context (NULL); 9346 9347 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr), 9348 OMP_PARALLEL); 9349 9350 g = gimple_build_omp_parallel (body, 9351 OMP_PARALLEL_CLAUSES (expr), 9352 NULL_TREE, NULL_TREE); 9353 if (OMP_PARALLEL_COMBINED (expr)) 9354 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED); 9355 gimplify_seq_add_stmt (pre_p, g); 9356 *expr_p = NULL_TREE; 9357 } 9358 9359 /* Gimplify the contents of an OMP_TASK statement. This involves 9360 gimplification of the body, as well as scanning the body for used 9361 variables. We need to do this scan now, because variable-sized 9362 decls will be decomposed during gimplification. */ 9363 9364 static void 9365 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p) 9366 { 9367 tree expr = *expr_p; 9368 gimple *g; 9369 gimple_seq body = NULL; 9370 9371 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p, 9372 omp_find_clause (OMP_TASK_CLAUSES (expr), 9373 OMP_CLAUSE_UNTIED) 9374 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK); 9375 9376 push_gimplify_context (); 9377 9378 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body); 9379 if (gimple_code (g) == GIMPLE_BIND) 9380 pop_gimplify_context (g); 9381 else 9382 pop_gimplify_context (NULL); 9383 9384 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr), 9385 OMP_TASK); 9386 9387 g = gimple_build_omp_task (body, 9388 OMP_TASK_CLAUSES (expr), 9389 NULL_TREE, NULL_TREE, 9390 NULL_TREE, NULL_TREE, NULL_TREE); 9391 gimplify_seq_add_stmt (pre_p, g); 9392 *expr_p = NULL_TREE; 9393 } 9394 9395 /* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD 9396 with non-NULL OMP_FOR_INIT. */ 9397 9398 static tree 9399 find_combined_omp_for (tree *tp, int *walk_subtrees, void *) 9400 { 9401 *walk_subtrees = 0; 9402 switch (TREE_CODE (*tp)) 9403 { 9404 case OMP_FOR: 9405 *walk_subtrees = 1; 9406 /* FALLTHRU */ 9407 case OMP_SIMD: 9408 if (OMP_FOR_INIT (*tp) != NULL_TREE) 9409 return *tp; 9410 break; 9411 case BIND_EXPR: 9412 case STATEMENT_LIST: 9413 case OMP_PARALLEL: 9414 *walk_subtrees = 1; 9415 break; 9416 default: 9417 break; 9418 } 9419 return NULL_TREE; 9420 } 9421 9422 /* Gimplify the gross structure of an OMP_FOR statement. 
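Only the skeleton is built here: each OMP_FOR_INIT/COND/INCR operand is reduced to a gimple value (non-constant bounds are evaluated into temporaries in the pre-body), the loop body is gimplified separately, and everything is reassembled into a GIMPLE_OMP_FOR tuple whose kind matches the source construct. Illustrative sketch with an invented temporary D.1: for a condition 'i < n * 2' the pre-body gets 'D.1 = n * 2' and the tuple's condition becomes 'i < D.1'.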
*/ 9423 9424 static enum gimplify_status 9425 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p) 9426 { 9427 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t; 9428 enum gimplify_status ret = GS_ALL_DONE; 9429 enum gimplify_status tret; 9430 gomp_for *gfor; 9431 gimple_seq for_body, for_pre_body; 9432 int i; 9433 bitmap has_decl_expr = NULL; 9434 enum omp_region_type ort = ORT_WORKSHARE; 9435 9436 orig_for_stmt = for_stmt = *expr_p; 9437 9438 switch (TREE_CODE (for_stmt)) 9439 { 9440 case OMP_FOR: 9441 case CILK_FOR: 9442 case OMP_DISTRIBUTE: 9443 break; 9444 case OACC_LOOP: 9445 ort = ORT_ACC; 9446 break; 9447 case OMP_TASKLOOP: 9448 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED)) 9449 ort = ORT_UNTIED_TASK; 9450 else 9451 ort = ORT_TASK; 9452 break; 9453 case OMP_SIMD: 9454 case CILK_SIMD: 9455 ort = ORT_SIMD; 9456 break; 9457 default: 9458 gcc_unreachable (); 9459 } 9460 9461 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear 9462 clause for the IV. */ 9463 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1) 9464 { 9465 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0); 9466 gcc_assert (TREE_CODE (t) == MODIFY_EXPR); 9467 decl = TREE_OPERAND (t, 0); 9468 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c)) 9469 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR 9470 && OMP_CLAUSE_DECL (c) == decl) 9471 { 9472 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1; 9473 break; 9474 } 9475 } 9476 9477 if (OMP_FOR_INIT (for_stmt) == NULL_TREE) 9478 { 9479 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP); 9480 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt), 9481 find_combined_omp_for, NULL, NULL); 9482 if (inner_for_stmt == NULL_TREE) 9483 { 9484 gcc_assert (seen_error ()); 9485 *expr_p = NULL_TREE; 9486 return GS_ERROR; 9487 } 9488 } 9489 9490 if (TREE_CODE (for_stmt) != OMP_TASKLOOP) 9491 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort, 9492 TREE_CODE (for_stmt)); 9493 9494 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE) 9495 gimplify_omp_ctxp->distribute = true; 9496 9497 /* Handle OMP_FOR_INIT. 
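(For simd, the pre-body is scanned first and the decls of its DECL_EXPRs are recorded in HAS_DECL_EXPR, so iteration variables declared inside the construct can later be set up as linear without copy-out, or private rather than lastprivate.)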
*/ 9498 for_pre_body = NULL; 9499 if (ort == ORT_SIMD && OMP_FOR_PRE_BODY (for_stmt)) 9500 { 9501 has_decl_expr = BITMAP_ALLOC (NULL); 9502 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR 9503 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt))) 9504 == VAR_DECL) 9505 { 9506 t = OMP_FOR_PRE_BODY (for_stmt); 9507 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t))); 9508 } 9509 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST) 9510 { 9511 tree_stmt_iterator si; 9512 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si); 9513 tsi_next (&si)) 9514 { 9515 t = tsi_stmt (si); 9516 if (TREE_CODE (t) == DECL_EXPR 9517 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL) 9518 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t))); 9519 } 9520 } 9521 } 9522 if (OMP_FOR_PRE_BODY (for_stmt)) 9523 { 9524 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp) 9525 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body); 9526 else 9527 { 9528 struct gimplify_omp_ctx ctx; 9529 memset (&ctx, 0, sizeof (ctx)); 9530 ctx.region_type = ORT_NONE; 9531 gimplify_omp_ctxp = &ctx; 9532 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body); 9533 gimplify_omp_ctxp = NULL; 9534 } 9535 } 9536 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE; 9537 9538 if (OMP_FOR_INIT (for_stmt) == NULL_TREE) 9539 for_stmt = inner_for_stmt; 9540 9541 /* For taskloop, need to gimplify the start, end and step before the 9542 taskloop, outside of the taskloop omp context. */ 9543 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP) 9544 { 9545 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++) 9546 { 9547 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i); 9548 if (!is_gimple_constant (TREE_OPERAND (t, 1))) 9549 { 9550 tree type = TREE_TYPE (TREE_OPERAND (t, 0)); 9551 TREE_OPERAND (t, 1) 9552 = get_initialized_tmp_var (TREE_OPERAND (t, 1), 9553 gimple_seq_empty_p (for_pre_body) 9554 ? pre_p : &for_pre_body, NULL, 9555 false); 9556 /* Reference to pointer conversion is considered useless, 9557 but is significant for firstprivate clause. Force it 9558 here. */ 9559 if (TREE_CODE (type) == POINTER_TYPE 9560 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 1))) 9561 == REFERENCE_TYPE)) 9562 { 9563 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type)); 9564 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v, 9565 TREE_OPERAND (t, 1)); 9566 gimplify_and_add (m, gimple_seq_empty_p (for_pre_body) 9567 ? pre_p : &for_pre_body); 9568 TREE_OPERAND (t, 1) = v; 9569 } 9570 tree c = build_omp_clause (input_location, 9571 OMP_CLAUSE_FIRSTPRIVATE); 9572 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1); 9573 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt); 9574 OMP_FOR_CLAUSES (orig_for_stmt) = c; 9575 } 9576 9577 /* Handle OMP_FOR_COND. */ 9578 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i); 9579 if (!is_gimple_constant (TREE_OPERAND (t, 1))) 9580 { 9581 tree type = TREE_TYPE (TREE_OPERAND (t, 0)); 9582 TREE_OPERAND (t, 1) 9583 = get_initialized_tmp_var (TREE_OPERAND (t, 1), 9584 gimple_seq_empty_p (for_pre_body) 9585 ? pre_p : &for_pre_body, NULL, 9586 false); 9587 /* Reference to pointer conversion is considered useless, 9588 but is significant for firstprivate clause. Force it 9589 here. 
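As an illustrative example: if the iterator has type 'int *' but the bound was evaluated through a C++ 'int *&' reference, a fresh 'int *' temporary is initialized from it below, and that temporary, not the reference, is what receives the firstprivate clause.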
*/ 9590 if (TREE_CODE (type) == POINTER_TYPE 9591 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 1))) 9592 == REFERENCE_TYPE)) 9593 { 9594 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type)); 9595 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v, 9596 TREE_OPERAND (t, 1)); 9597 gimplify_and_add (m, gimple_seq_empty_p (for_pre_body) 9598 ? pre_p : &for_pre_body); 9599 TREE_OPERAND (t, 1) = v; 9600 } 9601 tree c = build_omp_clause (input_location, 9602 OMP_CLAUSE_FIRSTPRIVATE); 9603 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1); 9604 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt); 9605 OMP_FOR_CLAUSES (orig_for_stmt) = c; 9606 } 9607 9608 /* Handle OMP_FOR_INCR. */ 9609 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i); 9610 if (TREE_CODE (t) == MODIFY_EXPR) 9611 { 9612 decl = TREE_OPERAND (t, 0); 9613 t = TREE_OPERAND (t, 1); 9614 tree *tp = &TREE_OPERAND (t, 1); 9615 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl) 9616 tp = &TREE_OPERAND (t, 0); 9617 9618 if (!is_gimple_constant (*tp)) 9619 { 9620 gimple_seq *seq = gimple_seq_empty_p (for_pre_body) 9621 ? pre_p : &for_pre_body; 9622 *tp = get_initialized_tmp_var (*tp, seq, NULL, false); 9623 tree c = build_omp_clause (input_location, 9624 OMP_CLAUSE_FIRSTPRIVATE); 9625 OMP_CLAUSE_DECL (c) = *tp; 9626 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt); 9627 OMP_FOR_CLAUSES (orig_for_stmt) = c; 9628 } 9629 } 9630 } 9631 9632 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort, 9633 OMP_TASKLOOP); 9634 } 9635 9636 if (orig_for_stmt != for_stmt) 9637 gimplify_omp_ctxp->combined_loop = true; 9638 9639 for_body = NULL; 9640 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) 9641 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt))); 9642 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) 9643 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt))); 9644 9645 tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED); 9646 bool is_doacross = false; 9647 if (c && OMP_CLAUSE_ORDERED_EXPR (c)) 9648 { 9649 is_doacross = true; 9650 gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH 9651 (OMP_FOR_INIT (for_stmt)) 9652 * 2); 9653 } 9654 int collapse = 1, tile = 0; 9655 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE); 9656 if (c) 9657 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c)); 9658 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE); 9659 if (c) 9660 tile = list_length (OMP_CLAUSE_TILE_LIST (c)); 9661 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++) 9662 { 9663 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i); 9664 gcc_assert (TREE_CODE (t) == MODIFY_EXPR); 9665 decl = TREE_OPERAND (t, 0); 9666 gcc_assert (DECL_P (decl)); 9667 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl)) 9668 || POINTER_TYPE_P (TREE_TYPE (decl))); 9669 if (is_doacross) 9670 { 9671 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt)) 9672 gimplify_omp_ctxp->loop_iter_var.quick_push 9673 (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i)); 9674 else 9675 gimplify_omp_ctxp->loop_iter_var.quick_push (decl); 9676 gimplify_omp_ctxp->loop_iter_var.quick_push (decl); 9677 } 9678 9679 /* Make sure the iteration variable is private. */ 9680 tree c = NULL_TREE; 9681 tree c2 = NULL_TREE; 9682 if (orig_for_stmt != for_stmt) 9683 /* Do this only on innermost construct for combined ones. 
*/; 9684 else if (ort == ORT_SIMD) 9685 { 9686 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables, 9687 (splay_tree_key) decl); 9688 omp_is_private (gimplify_omp_ctxp, decl, 9689 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) 9690 != 1)); 9691 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0) 9692 omp_notice_variable (gimplify_omp_ctxp, decl, true); 9693 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1) 9694 { 9695 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR); 9696 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1; 9697 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN; 9698 if (has_decl_expr 9699 && bitmap_bit_p (has_decl_expr, DECL_UID (decl))) 9700 { 9701 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1; 9702 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER; 9703 } 9704 struct gimplify_omp_ctx *outer 9705 = gimplify_omp_ctxp->outer_context; 9706 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)) 9707 { 9708 if (outer->region_type == ORT_WORKSHARE 9709 && outer->combined_loop) 9710 { 9711 n = splay_tree_lookup (outer->variables, 9712 (splay_tree_key)decl); 9713 if (n != NULL && (n->value & GOVD_LOCAL) != 0) 9714 { 9715 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1; 9716 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER; 9717 } 9718 else 9719 { 9720 struct gimplify_omp_ctx *octx = outer->outer_context; 9721 if (octx 9722 && octx->region_type == ORT_COMBINED_PARALLEL 9723 && octx->outer_context 9724 && (octx->outer_context->region_type 9725 == ORT_WORKSHARE) 9726 && octx->outer_context->combined_loop) 9727 { 9728 octx = octx->outer_context; 9729 n = splay_tree_lookup (octx->variables, 9730 (splay_tree_key)decl); 9731 if (n != NULL && (n->value & GOVD_LOCAL) != 0) 9732 { 9733 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1; 9734 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER; 9735 } 9736 } 9737 } 9738 } 9739 } 9740 9741 OMP_CLAUSE_DECL (c) = decl; 9742 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt); 9743 OMP_FOR_CLAUSES (for_stmt) = c; 9744 omp_add_variable (gimplify_omp_ctxp, decl, flags); 9745 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)) 9746 { 9747 if (outer->region_type == ORT_WORKSHARE 9748 && outer->combined_loop) 9749 { 9750 if (outer->outer_context 9751 && (outer->outer_context->region_type 9752 == ORT_COMBINED_PARALLEL)) 9753 outer = outer->outer_context; 9754 else if (omp_check_private (outer, decl, false)) 9755 outer = NULL; 9756 } 9757 else if (((outer->region_type & ORT_TASK) != 0) 9758 && outer->combined_loop 9759 && !omp_check_private (gimplify_omp_ctxp, 9760 decl, false)) 9761 ; 9762 else if (outer->region_type != ORT_COMBINED_PARALLEL) 9763 { 9764 omp_notice_variable (outer, decl, true); 9765 outer = NULL; 9766 } 9767 if (outer) 9768 { 9769 n = splay_tree_lookup (outer->variables, 9770 (splay_tree_key)decl); 9771 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0) 9772 { 9773 omp_add_variable (outer, decl, 9774 GOVD_LASTPRIVATE | GOVD_SEEN); 9775 if (outer->region_type == ORT_COMBINED_PARALLEL 9776 && outer->outer_context 9777 && (outer->outer_context->region_type 9778 == ORT_WORKSHARE) 9779 && outer->outer_context->combined_loop) 9780 { 9781 outer = outer->outer_context; 9782 n = splay_tree_lookup (outer->variables, 9783 (splay_tree_key)decl); 9784 if (omp_check_private (outer, decl, false)) 9785 outer = NULL; 9786 else if (n == NULL 9787 || ((n->value & GOVD_DATA_SHARE_CLASS) 9788 == 0)) 9789 omp_add_variable (outer, decl, 9790 GOVD_LASTPRIVATE 9791 | GOVD_SEEN); 9792 else 9793 outer = NULL; 9794 } 9795 if (outer && outer->outer_context 9796 && 
(outer->outer_context->region_type 9797 == ORT_COMBINED_TEAMS)) 9798 { 9799 outer = outer->outer_context; 9800 n = splay_tree_lookup (outer->variables, 9801 (splay_tree_key)decl); 9802 if (n == NULL 9803 || (n->value & GOVD_DATA_SHARE_CLASS) == 0) 9804 omp_add_variable (outer, decl, 9805 GOVD_SHARED | GOVD_SEEN); 9806 else 9807 outer = NULL; 9808 } 9809 if (outer && outer->outer_context) 9810 omp_notice_variable (outer->outer_context, decl, 9811 true); 9812 } 9813 } 9814 } 9815 } 9816 else 9817 { 9818 bool lastprivate 9819 = (!has_decl_expr 9820 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl))); 9821 struct gimplify_omp_ctx *outer 9822 = gimplify_omp_ctxp->outer_context; 9823 if (outer && lastprivate) 9824 { 9825 if (outer->region_type == ORT_WORKSHARE 9826 && outer->combined_loop) 9827 { 9828 n = splay_tree_lookup (outer->variables, 9829 (splay_tree_key)decl); 9830 if (n != NULL && (n->value & GOVD_LOCAL) != 0) 9831 { 9832 lastprivate = false; 9833 outer = NULL; 9834 } 9835 else if (outer->outer_context 9836 && (outer->outer_context->region_type 9837 == ORT_COMBINED_PARALLEL)) 9838 outer = outer->outer_context; 9839 else if (omp_check_private (outer, decl, false)) 9840 outer = NULL; 9841 } 9842 else if (((outer->region_type & ORT_TASK) != 0) 9843 && outer->combined_loop 9844 && !omp_check_private (gimplify_omp_ctxp, 9845 decl, false)) 9846 ; 9847 else if (outer->region_type != ORT_COMBINED_PARALLEL) 9848 { 9849 omp_notice_variable (outer, decl, true); 9850 outer = NULL; 9851 } 9852 if (outer) 9853 { 9854 n = splay_tree_lookup (outer->variables, 9855 (splay_tree_key)decl); 9856 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0) 9857 { 9858 omp_add_variable (outer, decl, 9859 GOVD_LASTPRIVATE | GOVD_SEEN); 9860 if (outer->region_type == ORT_COMBINED_PARALLEL 9861 && outer->outer_context 9862 && (outer->outer_context->region_type 9863 == ORT_WORKSHARE) 9864 && outer->outer_context->combined_loop) 9865 { 9866 outer = outer->outer_context; 9867 n = splay_tree_lookup (outer->variables, 9868 (splay_tree_key)decl); 9869 if (omp_check_private (outer, decl, false)) 9870 outer = NULL; 9871 else if (n == NULL 9872 || ((n->value & GOVD_DATA_SHARE_CLASS) 9873 == 0)) 9874 omp_add_variable (outer, decl, 9875 GOVD_LASTPRIVATE 9876 | GOVD_SEEN); 9877 else 9878 outer = NULL; 9879 } 9880 if (outer && outer->outer_context 9881 && (outer->outer_context->region_type 9882 == ORT_COMBINED_TEAMS)) 9883 { 9884 outer = outer->outer_context; 9885 n = splay_tree_lookup (outer->variables, 9886 (splay_tree_key)decl); 9887 if (n == NULL 9888 || (n->value & GOVD_DATA_SHARE_CLASS) == 0) 9889 omp_add_variable (outer, decl, 9890 GOVD_SHARED | GOVD_SEEN); 9891 else 9892 outer = NULL; 9893 } 9894 if (outer && outer->outer_context) 9895 omp_notice_variable (outer->outer_context, decl, 9896 true); 9897 } 9898 } 9899 } 9900 9901 c = build_omp_clause (input_location, 9902 lastprivate ? OMP_CLAUSE_LASTPRIVATE 9903 : OMP_CLAUSE_PRIVATE); 9904 OMP_CLAUSE_DECL (c) = decl; 9905 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt); 9906 OMP_FOR_CLAUSES (for_stmt) = c; 9907 omp_add_variable (gimplify_omp_ctxp, decl, 9908 (lastprivate ? 
GOVD_LASTPRIVATE : GOVD_PRIVATE) 9909 | GOVD_EXPLICIT | GOVD_SEEN); 9910 c = NULL_TREE; 9911 } 9912 } 9913 else if (omp_is_private (gimplify_omp_ctxp, decl, 0)) 9914 omp_notice_variable (gimplify_omp_ctxp, decl, true); 9915 else 9916 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN); 9917 9918 /* If DECL is not a gimple register, create a temporary variable to act 9919 as an iteration counter. This is valid, since DECL cannot be 9920 modified in the body of the loop. Similarly for any iteration vars 9921 in simd with collapse > 1 where the iterator vars must be 9922 lastprivate. */ 9923 if (orig_for_stmt != for_stmt) 9924 var = decl; 9925 else if (!is_gimple_reg (decl) 9926 || (ort == ORT_SIMD 9927 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)) 9928 { 9929 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; 9930 /* Make sure omp_add_variable is not called on it prematurely. 9931 We call it ourselves a few lines later. */ 9932 gimplify_omp_ctxp = NULL; 9933 var = create_tmp_var (TREE_TYPE (decl), get_name (decl)); 9934 gimplify_omp_ctxp = ctx; 9935 TREE_OPERAND (t, 0) = var; 9936 9937 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var)); 9938 9939 if (ort == ORT_SIMD 9940 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1) 9941 { 9942 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR); 9943 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1; 9944 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1; 9945 OMP_CLAUSE_DECL (c2) = var; 9946 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt); 9947 OMP_FOR_CLAUSES (for_stmt) = c2; 9948 omp_add_variable (gimplify_omp_ctxp, var, 9949 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN); 9950 if (c == NULL_TREE) 9951 { 9952 c = c2; 9953 c2 = NULL_TREE; 9954 } 9955 } 9956 else 9957 omp_add_variable (gimplify_omp_ctxp, var, 9958 GOVD_PRIVATE | GOVD_SEEN); 9959 } 9960 else 9961 var = decl; 9962 9963 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL, 9964 is_gimple_val, fb_rvalue, false); 9965 ret = MIN (ret, tret); 9966 if (ret == GS_ERROR) 9967 return ret; 9968 9969 /* Handle OMP_FOR_COND. */ 9970 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i); 9971 gcc_assert (COMPARISON_CLASS_P (t)); 9972 gcc_assert (TREE_OPERAND (t, 0) == decl); 9973 9974 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL, 9975 is_gimple_val, fb_rvalue, false); 9976 ret = MIN (ret, tret); 9977 9978 /* Handle OMP_FOR_INCR. */ 9979 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i); 9980 switch (TREE_CODE (t)) 9981 { 9982 case PREINCREMENT_EXPR: 9983 case POSTINCREMENT_EXPR: 9984 { 9985 tree decl = TREE_OPERAND (t, 0); 9986 /* c_omp_for_incr_canonicalize_ptr() should have been 9987 called to massage things appropriately. */ 9988 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl))); 9989 9990 if (orig_for_stmt != for_stmt) 9991 break; 9992 t = build_int_cst (TREE_TYPE (decl), 1); 9993 if (c) 9994 OMP_CLAUSE_LINEAR_STEP (c) = t; 9995 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t); 9996 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t); 9997 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t; 9998 break; 9999 } 10000 10001 case PREDECREMENT_EXPR: 10002 case POSTDECREMENT_EXPR: 10003 /* c_omp_for_incr_canonicalize_ptr() should have been 10004 called to massage things appropriately. 
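(Pointer iterators have already been rewritten into the MODIFY_EXPR / POINTER_PLUS_EXPR form handled further below, which is why only integral decls can reach the increment and decrement cases, as the assert verifies.)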
*/ 10005 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl))); 10006 if (orig_for_stmt != for_stmt) 10007 break; 10008 t = build_int_cst (TREE_TYPE (decl), -1); 10009 if (c) 10010 OMP_CLAUSE_LINEAR_STEP (c) = t; 10011 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t); 10012 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t); 10013 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t; 10014 break; 10015 10016 case MODIFY_EXPR: 10017 gcc_assert (TREE_OPERAND (t, 0) == decl); 10018 TREE_OPERAND (t, 0) = var; 10019 10020 t = TREE_OPERAND (t, 1); 10021 switch (TREE_CODE (t)) 10022 { 10023 case PLUS_EXPR: 10024 if (TREE_OPERAND (t, 1) == decl) 10025 { 10026 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0); 10027 TREE_OPERAND (t, 0) = var; 10028 break; 10029 } 10030 10031 /* Fallthru. */ 10032 case MINUS_EXPR: 10033 case POINTER_PLUS_EXPR: 10034 gcc_assert (TREE_OPERAND (t, 0) == decl); 10035 TREE_OPERAND (t, 0) = var; 10036 break; 10037 default: 10038 gcc_unreachable (); 10039 } 10040 10041 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL, 10042 is_gimple_val, fb_rvalue, false); 10043 ret = MIN (ret, tret); 10044 if (c) 10045 { 10046 tree step = TREE_OPERAND (t, 1); 10047 tree stept = TREE_TYPE (decl); 10048 if (POINTER_TYPE_P (stept)) 10049 stept = sizetype; 10050 step = fold_convert (stept, step); 10051 if (TREE_CODE (t) == MINUS_EXPR) 10052 step = fold_build1 (NEGATE_EXPR, stept, step); 10053 OMP_CLAUSE_LINEAR_STEP (c) = step; 10054 if (step != TREE_OPERAND (t, 1)) 10055 { 10056 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), 10057 &for_pre_body, NULL, 10058 is_gimple_val, fb_rvalue, false); 10059 ret = MIN (ret, tret); 10060 } 10061 } 10062 break; 10063 10064 default: 10065 gcc_unreachable (); 10066 } 10067 10068 if (c2) 10069 { 10070 gcc_assert (c); 10071 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c); 10072 } 10073 10074 if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt) 10075 { 10076 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c)) 10077 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE 10078 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL) 10079 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR 10080 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c) 10081 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL)) 10082 && OMP_CLAUSE_DECL (c) == decl) 10083 { 10084 if (is_doacross && (collapse == 1 || i >= collapse)) 10085 t = var; 10086 else 10087 { 10088 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i); 10089 gcc_assert (TREE_CODE (t) == MODIFY_EXPR); 10090 gcc_assert (TREE_OPERAND (t, 0) == var); 10091 t = TREE_OPERAND (t, 1); 10092 gcc_assert (TREE_CODE (t) == PLUS_EXPR 10093 || TREE_CODE (t) == MINUS_EXPR 10094 || TREE_CODE (t) == POINTER_PLUS_EXPR); 10095 gcc_assert (TREE_OPERAND (t, 0) == var); 10096 t = build2 (TREE_CODE (t), TREE_TYPE (decl), 10097 is_doacross ? 
var : decl, 10098 TREE_OPERAND (t, 1)); 10099 } 10100 gimple_seq *seq; 10101 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE) 10102 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c); 10103 else 10104 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c); 10105 gimplify_assign (decl, t, seq); 10106 } 10107 } 10108 } 10109 10110 BITMAP_FREE (has_decl_expr); 10111 10112 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP) 10113 { 10114 push_gimplify_context (); 10115 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR) 10116 { 10117 OMP_FOR_BODY (orig_for_stmt) 10118 = build3 (BIND_EXPR, void_type_node, NULL, 10119 OMP_FOR_BODY (orig_for_stmt), NULL); 10120 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1; 10121 } 10122 } 10123 10124 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt), 10125 &for_body); 10126 10127 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP) 10128 { 10129 if (gimple_code (g) == GIMPLE_BIND) 10130 pop_gimplify_context (g); 10131 else 10132 pop_gimplify_context (NULL); 10133 } 10134 10135 if (orig_for_stmt != for_stmt) 10136 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++) 10137 { 10138 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i); 10139 decl = TREE_OPERAND (t, 0); 10140 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; 10141 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP) 10142 gimplify_omp_ctxp = ctx->outer_context; 10143 var = create_tmp_var (TREE_TYPE (decl), get_name (decl)); 10144 gimplify_omp_ctxp = ctx; 10145 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN); 10146 TREE_OPERAND (t, 0) = var; 10147 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i); 10148 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1)); 10149 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var; 10150 } 10151 10152 gimplify_adjust_omp_clauses (pre_p, for_body, 10153 &OMP_FOR_CLAUSES (orig_for_stmt), 10154 TREE_CODE (orig_for_stmt)); 10155 10156 int kind; 10157 switch (TREE_CODE (orig_for_stmt)) 10158 { 10159 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break; 10160 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break; 10161 case CILK_SIMD: kind = GF_OMP_FOR_KIND_CILKSIMD; break; 10162 case CILK_FOR: kind = GF_OMP_FOR_KIND_CILKFOR; break; 10163 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break; 10164 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break; 10165 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break; 10166 default: 10167 gcc_unreachable (); 10168 } 10169 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt), 10170 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)), 10171 for_pre_body); 10172 if (orig_for_stmt != for_stmt) 10173 gimple_omp_for_set_combined_p (gfor, true); 10174 if (gimplify_omp_ctxp 10175 && (gimplify_omp_ctxp->combined_loop 10176 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL 10177 && gimplify_omp_ctxp->outer_context 10178 && gimplify_omp_ctxp->outer_context->combined_loop))) 10179 { 10180 gimple_omp_for_set_combined_into_p (gfor, true); 10181 if (gimplify_omp_ctxp->combined_loop) 10182 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD); 10183 else 10184 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR); 10185 } 10186 10187 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++) 10188 { 10189 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i); 10190 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0)); 10191 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1)); 10192 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i); 10193 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t)); 10194 
gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1)); 10195 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i); 10196 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1)); 10197 } 10198 10199 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop 10200 constructs with GIMPLE_OMP_TASK sandwiched in between them. 10201 The outer taskloop stands for computing the number of iterations 10202 (and the iteration counts for collapsed loops) and for holding 10203 taskloop-specific clauses. The task construct stands for the effect 10204 of data sharing on the explicit task it creates, and the inner taskloop 10205 stands for expansion of the static loop inside of the explicit task construct. */ 10206 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP) 10207 { 10208 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor); 10209 tree task_clauses = NULL_TREE; 10210 tree c = *gfor_clauses_ptr; 10211 tree *gtask_clauses_ptr = &task_clauses; 10212 tree outer_for_clauses = NULL_TREE; 10213 tree *gforo_clauses_ptr = &outer_for_clauses; 10214 for (; c; c = OMP_CLAUSE_CHAIN (c)) 10215 switch (OMP_CLAUSE_CODE (c)) 10216 { 10217 /* These clauses are allowed on task; move them there. */ 10218 case OMP_CLAUSE_SHARED: 10219 case OMP_CLAUSE_FIRSTPRIVATE: 10220 case OMP_CLAUSE_DEFAULT: 10221 case OMP_CLAUSE_IF: 10222 case OMP_CLAUSE_UNTIED: 10223 case OMP_CLAUSE_FINAL: 10224 case OMP_CLAUSE_MERGEABLE: 10225 case OMP_CLAUSE_PRIORITY: 10226 *gtask_clauses_ptr = c; 10227 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c); 10228 break; 10229 case OMP_CLAUSE_PRIVATE: 10230 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c)) 10231 { 10232 /* We want private on outer for and firstprivate 10233 on task. */ 10234 *gtask_clauses_ptr 10235 = build_omp_clause (OMP_CLAUSE_LOCATION (c), 10236 OMP_CLAUSE_FIRSTPRIVATE); 10237 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c); 10238 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL); 10239 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr); 10240 *gforo_clauses_ptr = c; 10241 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c); 10242 } 10243 else 10244 { 10245 *gtask_clauses_ptr = c; 10246 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c); 10247 } 10248 break; 10249 /* These clauses go into outer taskloop clauses. */ 10250 case OMP_CLAUSE_GRAINSIZE: 10251 case OMP_CLAUSE_NUM_TASKS: 10252 case OMP_CLAUSE_NOGROUP: 10253 *gforo_clauses_ptr = c; 10254 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c); 10255 break; 10256 /* Taskloop clauses we duplicate on both taskloops. */ 10257 case OMP_CLAUSE_COLLAPSE: 10258 *gfor_clauses_ptr = c; 10259 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c); 10260 *gforo_clauses_ptr = copy_node (c); 10261 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr); 10262 break; 10263 /* For lastprivate, keep the clause on the inner taskloop, and add 10264 a shared clause on the task. If the same decl is also firstprivate, 10265 also add a firstprivate clause on the inner taskloop.
*/ 10266 case OMP_CLAUSE_LASTPRIVATE: 10267 if (OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c)) 10268 { 10269 /* For taskloop C++ lastprivate IVs, we want: 10270 1) private on outer taskloop 10271 2) firstprivate and shared on task 10272 3) lastprivate on inner taskloop */ 10273 *gtask_clauses_ptr 10274 = build_omp_clause (OMP_CLAUSE_LOCATION (c), 10275 OMP_CLAUSE_FIRSTPRIVATE); 10276 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c); 10277 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL); 10278 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr); 10279 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1; 10280 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c), 10281 OMP_CLAUSE_PRIVATE); 10282 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c); 10283 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1; 10284 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c); 10285 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr); 10286 } 10287 *gfor_clauses_ptr = c; 10288 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c); 10289 *gtask_clauses_ptr 10290 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED); 10291 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c); 10292 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)) 10293 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1; 10294 gtask_clauses_ptr 10295 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr); 10296 break; 10297 default: 10298 gcc_unreachable (); 10299 } 10300 *gfor_clauses_ptr = NULL_TREE; 10301 *gtask_clauses_ptr = NULL_TREE; 10302 *gforo_clauses_ptr = NULL_TREE; 10303 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE); 10304 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE, 10305 NULL_TREE, NULL_TREE, NULL_TREE); 10306 gimple_omp_task_set_taskloop_p (g, true); 10307 g = gimple_build_bind (NULL_TREE, g, NULL_TREE); 10308 gomp_for *gforo 10309 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses, 10310 gimple_omp_for_collapse (gfor), 10311 gimple_omp_for_pre_body (gfor)); 10312 gimple_omp_for_set_pre_body (gfor, NULL); 10313 gimple_omp_for_set_combined_p (gforo, true); 10314 gimple_omp_for_set_combined_into_p (gfor, true); 10315 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++) 10316 { 10317 tree type = TREE_TYPE (gimple_omp_for_index (gfor, i)); 10318 tree v = create_tmp_var (type); 10319 gimple_omp_for_set_index (gforo, i, v); 10320 t = unshare_expr (gimple_omp_for_initial (gfor, i)); 10321 gimple_omp_for_set_initial (gforo, i, t); 10322 gimple_omp_for_set_cond (gforo, i, 10323 gimple_omp_for_cond (gfor, i)); 10324 t = unshare_expr (gimple_omp_for_final (gfor, i)); 10325 gimple_omp_for_set_final (gforo, i, t); 10326 t = unshare_expr (gimple_omp_for_incr (gfor, i)); 10327 gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i)); 10328 TREE_OPERAND (t, 0) = v; 10329 gimple_omp_for_set_incr (gforo, i, t); 10330 t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE); 10331 OMP_CLAUSE_DECL (t) = v; 10332 OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo); 10333 gimple_omp_for_set_clauses (gforo, t); 10334 } 10335 gimplify_seq_add_stmt (pre_p, gforo); 10336 } 10337 else 10338 gimplify_seq_add_stmt (pre_p, gfor); 10339 if (ret != GS_ALL_DONE) 10340 return GS_ERROR; 10341 *expr_p = NULL_TREE; 10342 return GS_ALL_DONE; 10343 } 10344 10345 /* Helper function of optimize_target_teams, find OMP_TEAMS inside 10346 of OMP_TARGET's body. 
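Note that the walk only descends through BIND_EXPRs and STATEMENT_LISTs, so only a teams construct forming the (possibly wrapped) direct body of the target is found here; anything nested more deeply is treated as if no teams construct were present.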
*/ 10347 10348 static tree 10349 find_omp_teams (tree *tp, int *walk_subtrees, void *) 10350 { 10351 *walk_subtrees = 0; 10352 switch (TREE_CODE (*tp)) 10353 { 10354 case OMP_TEAMS: 10355 return *tp; 10356 case BIND_EXPR: 10357 case STATEMENT_LIST: 10358 *walk_subtrees = 1; 10359 break; 10360 default: 10361 break; 10362 } 10363 return NULL_TREE; 10364 } 10365 10366 /* Helper function of optimize_target_teams, determine if the expression 10367 can be computed safely before the target construct on the host. */ 10368 10369 static tree 10370 computable_teams_clause (tree *tp, int *walk_subtrees, void *) 10371 { 10372 splay_tree_node n; 10373 10374 if (TYPE_P (*tp)) 10375 { 10376 *walk_subtrees = 0; 10377 return NULL_TREE; 10378 } 10379 switch (TREE_CODE (*tp)) 10380 { 10381 case VAR_DECL: 10382 case PARM_DECL: 10383 case RESULT_DECL: 10384 *walk_subtrees = 0; 10385 if (error_operand_p (*tp) 10386 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp)) 10387 || DECL_HAS_VALUE_EXPR_P (*tp) 10388 || DECL_THREAD_LOCAL_P (*tp) 10389 || TREE_SIDE_EFFECTS (*tp) 10390 || TREE_THIS_VOLATILE (*tp)) 10391 return *tp; 10392 if (is_global_var (*tp) 10393 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp)) 10394 || lookup_attribute ("omp declare target link", 10395 DECL_ATTRIBUTES (*tp)))) 10396 return *tp; 10397 if (VAR_P (*tp) 10398 && !DECL_SEEN_IN_BIND_EXPR_P (*tp) 10399 && !is_global_var (*tp) 10400 && decl_function_context (*tp) == current_function_decl) 10401 return *tp; 10402 n = splay_tree_lookup (gimplify_omp_ctxp->variables, 10403 (splay_tree_key) *tp); 10404 if (n == NULL) 10405 { 10406 if (gimplify_omp_ctxp->target_map_scalars_firstprivate) 10407 return NULL_TREE; 10408 return *tp; 10409 } 10410 else if (n->value & GOVD_LOCAL) 10411 return *tp; 10412 else if (n->value & GOVD_FIRSTPRIVATE) 10413 return NULL_TREE; 10414 else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO)) 10415 == (GOVD_MAP | GOVD_MAP_ALWAYS_TO)) 10416 return NULL_TREE; 10417 return *tp; 10418 case INTEGER_CST: 10419 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp))) 10420 return *tp; 10421 return NULL_TREE; 10422 case TARGET_EXPR: 10423 if (TARGET_EXPR_INITIAL (*tp) 10424 || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL) 10425 return *tp; 10426 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp), 10427 walk_subtrees, NULL); 10428 /* Allow some reasonable subset of integral arithmetics. */ 10429 case PLUS_EXPR: 10430 case MINUS_EXPR: 10431 case MULT_EXPR: 10432 case TRUNC_DIV_EXPR: 10433 case CEIL_DIV_EXPR: 10434 case FLOOR_DIV_EXPR: 10435 case ROUND_DIV_EXPR: 10436 case TRUNC_MOD_EXPR: 10437 case CEIL_MOD_EXPR: 10438 case FLOOR_MOD_EXPR: 10439 case ROUND_MOD_EXPR: 10440 case RDIV_EXPR: 10441 case EXACT_DIV_EXPR: 10442 case MIN_EXPR: 10443 case MAX_EXPR: 10444 case LSHIFT_EXPR: 10445 case RSHIFT_EXPR: 10446 case BIT_IOR_EXPR: 10447 case BIT_XOR_EXPR: 10448 case BIT_AND_EXPR: 10449 case NEGATE_EXPR: 10450 case ABS_EXPR: 10451 case BIT_NOT_EXPR: 10452 case NON_LVALUE_EXPR: 10453 CASE_CONVERT: 10454 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp))) 10455 return *tp; 10456 return NULL_TREE; 10457 /* And disallow anything else, except for comparisons. */ 10458 default: 10459 if (COMPARISON_CLASS_P (*tp)) 10460 return NULL_TREE; 10461 return *tp; 10462 } 10463 } 10464 10465 /* Try to determine if the num_teams and/or thread_limit expressions 10466 can have their values determined already before entering the 10467 target construct. 
10468 INTEGER_CSTs trivially can; so can 10469 integral decls that are firstprivate (explicitly or implicitly) 10470 or explicitly map(always, to:) or map(always, tofrom:) on the target 10471 region, as can expressions involving simple arithmetic on those. 10472 Function calls are not OK, nor is dereferencing something, etc. 10473 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of 10474 EXPR based on what we find: 10475 0 stands for a clause not specified at all; use the implementation default. 10476 -1 stands for a value that can't be determined easily before entering 10477 the target construct. 10478 If no teams construct is present at all, use 1 for num_teams 10479 and 0 for thread_limit (only one team is involved, and the thread 10480 limit is implementation defined). */ 10481 10482 static void 10483 optimize_target_teams (tree target, gimple_seq *pre_p) 10484 { 10485 tree body = OMP_BODY (target); 10486 tree teams = walk_tree (&body, find_omp_teams, NULL, NULL); 10487 tree num_teams = integer_zero_node; 10488 tree thread_limit = integer_zero_node; 10489 location_t num_teams_loc = EXPR_LOCATION (target); 10490 location_t thread_limit_loc = EXPR_LOCATION (target); 10491 tree c, *p, expr; 10492 struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp; 10493 10494 if (teams == NULL_TREE) 10495 num_teams = integer_one_node; 10496 else 10497 for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c)) 10498 { 10499 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS) 10500 { 10501 p = &num_teams; 10502 num_teams_loc = OMP_CLAUSE_LOCATION (c); 10503 } 10504 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT) 10505 { 10506 p = &thread_limit; 10507 thread_limit_loc = OMP_CLAUSE_LOCATION (c); 10508 } 10509 else 10510 continue; 10511 expr = OMP_CLAUSE_OPERAND (c, 0); 10512 if (TREE_CODE (expr) == INTEGER_CST) 10513 { 10514 *p = expr; 10515 continue; 10516 } 10517 if (walk_tree (&expr, computable_teams_clause, NULL, NULL)) 10518 { 10519 *p = integer_minus_one_node; 10520 continue; 10521 } 10522 *p = expr; 10523 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context; 10524 if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false) 10525 == GS_ERROR) 10526 { 10527 gimplify_omp_ctxp = target_ctx; 10528 *p = integer_minus_one_node; 10529 continue; 10530 } 10531 gimplify_omp_ctxp = target_ctx; 10532 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR) 10533 OMP_CLAUSE_OPERAND (c, 0) = *p; 10534 } 10535 c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT); 10536 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit; 10537 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target); 10538 OMP_TARGET_CLAUSES (target) = c; 10539 c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS); 10540 OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams; 10541 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target); 10542 OMP_TARGET_CLAUSES (target) = c; 10543 } 10544 10545 /* Gimplify the gross structure of several OMP constructs. */ 10546 10547 static void 10548 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p) 10549 { 10550 tree expr = *expr_p; 10551 gimple *stmt; 10552 gimple_seq body = NULL; 10553 enum omp_region_type ort; 10554 10555 switch (TREE_CODE (expr)) 10556 { 10557 case OMP_SECTIONS: 10558 case OMP_SINGLE: 10559 ort = ORT_WORKSHARE; 10560 break; 10561 case OMP_TARGET: 10562 ort = OMP_TARGET_COMBINED (expr) ?
ORT_COMBINED_TARGET : ORT_TARGET; 10563 break; 10564 case OACC_KERNELS: 10565 ort = ORT_ACC_KERNELS; 10566 break; 10567 case OACC_PARALLEL: 10568 ort = ORT_ACC_PARALLEL; 10569 break; 10570 case OACC_DATA: 10571 ort = ORT_ACC_DATA; 10572 break; 10573 case OMP_TARGET_DATA: 10574 ort = ORT_TARGET_DATA; 10575 break; 10576 case OMP_TEAMS: 10577 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS; 10578 break; 10579 case OACC_HOST_DATA: 10580 ort = ORT_ACC_HOST_DATA; 10581 break; 10582 default: 10583 gcc_unreachable (); 10584 } 10585 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort, 10586 TREE_CODE (expr)); 10587 if (TREE_CODE (expr) == OMP_TARGET) 10588 optimize_target_teams (expr, pre_p); 10589 if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0) 10590 { 10591 push_gimplify_context (); 10592 gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body); 10593 if (gimple_code (g) == GIMPLE_BIND) 10594 pop_gimplify_context (g); 10595 else 10596 pop_gimplify_context (NULL); 10597 if ((ort & ORT_TARGET_DATA) != 0) 10598 { 10599 enum built_in_function end_ix; 10600 switch (TREE_CODE (expr)) 10601 { 10602 case OACC_DATA: 10603 case OACC_HOST_DATA: 10604 end_ix = BUILT_IN_GOACC_DATA_END; 10605 break; 10606 case OMP_TARGET_DATA: 10607 end_ix = BUILT_IN_GOMP_TARGET_END_DATA; 10608 break; 10609 default: 10610 gcc_unreachable (); 10611 } 10612 tree fn = builtin_decl_explicit (end_ix); 10613 g = gimple_build_call (fn, 0); 10614 gimple_seq cleanup = NULL; 10615 gimple_seq_add_stmt (&cleanup, g); 10616 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY); 10617 body = NULL; 10618 gimple_seq_add_stmt (&body, g); 10619 } 10620 } 10621 else 10622 gimplify_and_add (OMP_BODY (expr), &body); 10623 gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr), 10624 TREE_CODE (expr)); 10625 10626 switch (TREE_CODE (expr)) 10627 { 10628 case OACC_DATA: 10629 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA, 10630 OMP_CLAUSES (expr)); 10631 break; 10632 case OACC_KERNELS: 10633 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS, 10634 OMP_CLAUSES (expr)); 10635 break; 10636 case OACC_HOST_DATA: 10637 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA, 10638 OMP_CLAUSES (expr)); 10639 break; 10640 case OACC_PARALLEL: 10641 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL, 10642 OMP_CLAUSES (expr)); 10643 break; 10644 case OMP_SECTIONS: 10645 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr)); 10646 break; 10647 case OMP_SINGLE: 10648 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr)); 10649 break; 10650 case OMP_TARGET: 10651 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION, 10652 OMP_CLAUSES (expr)); 10653 break; 10654 case OMP_TARGET_DATA: 10655 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA, 10656 OMP_CLAUSES (expr)); 10657 break; 10658 case OMP_TEAMS: 10659 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr)); 10660 break; 10661 default: 10662 gcc_unreachable (); 10663 } 10664 10665 gimplify_seq_add_stmt (pre_p, stmt); 10666 *expr_p = NULL_TREE; 10667 } 10668 10669 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP 10670 target update constructs. 
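These directives are standalone, so, as a rough sketch, '#pragma omp target update to (x)' simply becomes a bodiless GIMPLE_OMP_TARGET tuple of kind GF_OMP_TARGET_KIND_UPDATE carrying the scanned and adjusted clause list.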
*/ 10671 10672 static void 10673 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p) 10674 { 10675 tree expr = *expr_p; 10676 int kind; 10677 gomp_target *stmt; 10678 enum omp_region_type ort = ORT_WORKSHARE; 10679 10680 switch (TREE_CODE (expr)) 10681 { 10682 case OACC_ENTER_DATA: 10683 case OACC_EXIT_DATA: 10684 kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA; 10685 ort = ORT_ACC; 10686 break; 10687 case OACC_UPDATE: 10688 kind = GF_OMP_TARGET_KIND_OACC_UPDATE; 10689 ort = ORT_ACC; 10690 break; 10691 case OMP_TARGET_UPDATE: 10692 kind = GF_OMP_TARGET_KIND_UPDATE; 10693 break; 10694 case OMP_TARGET_ENTER_DATA: 10695 kind = GF_OMP_TARGET_KIND_ENTER_DATA; 10696 break; 10697 case OMP_TARGET_EXIT_DATA: 10698 kind = GF_OMP_TARGET_KIND_EXIT_DATA; 10699 break; 10700 default: 10701 gcc_unreachable (); 10702 } 10703 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p, 10704 ort, TREE_CODE (expr)); 10705 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr), 10706 TREE_CODE (expr)); 10707 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr)); 10708 10709 gimplify_seq_add_stmt (pre_p, stmt); 10710 *expr_p = NULL_TREE; 10711 } 10712 10713 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have 10714 stabilized the lhs of the atomic operation as *ADDR. Return true if 10715 EXPR is this stabilized form. */ 10716 10717 static bool 10718 goa_lhs_expr_p (tree expr, tree addr) 10719 { 10720 /* Also include casts to other type variants. The C front end is fond 10721 of adding these for e.g. volatile variables. This is like 10722 STRIP_TYPE_NOPS but includes the main variant lookup. */ 10723 STRIP_USELESS_TYPE_CONVERSION (expr); 10724 10725 if (TREE_CODE (expr) == INDIRECT_REF) 10726 { 10727 expr = TREE_OPERAND (expr, 0); 10728 while (expr != addr 10729 && (CONVERT_EXPR_P (expr) 10730 || TREE_CODE (expr) == NON_LVALUE_EXPR) 10731 && TREE_CODE (expr) == TREE_CODE (addr) 10732 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr))) 10733 { 10734 expr = TREE_OPERAND (expr, 0); 10735 addr = TREE_OPERAND (addr, 0); 10736 } 10737 if (expr == addr) 10738 return true; 10739 return (TREE_CODE (addr) == ADDR_EXPR 10740 && TREE_CODE (expr) == ADDR_EXPR 10741 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0)); 10742 } 10743 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0)) 10744 return true; 10745 return false; 10746 } 10747 10748 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an 10749 expression does not involve the lhs, evaluate it into a temporary. 10750 Return 1 if the lhs appeared as a subexpression, 0 if it did not, 10751 or -1 if an error was encountered. 
*/ 10752 10753 static int 10754 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr, 10755 tree lhs_var) 10756 { 10757 tree expr = *expr_p; 10758 int saw_lhs; 10759 10760 if (goa_lhs_expr_p (expr, lhs_addr)) 10761 { 10762 *expr_p = lhs_var; 10763 return 1; 10764 } 10765 if (is_gimple_val (expr)) 10766 return 0; 10767 10768 saw_lhs = 0; 10769 switch (TREE_CODE_CLASS (TREE_CODE (expr))) 10770 { 10771 case tcc_binary: 10772 case tcc_comparison: 10773 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr, 10774 lhs_var); 10775 /* FALLTHRU */ 10776 case tcc_unary: 10777 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr, 10778 lhs_var); 10779 break; 10780 case tcc_expression: 10781 switch (TREE_CODE (expr)) 10782 { 10783 case TRUTH_ANDIF_EXPR: 10784 case TRUTH_ORIF_EXPR: 10785 case TRUTH_AND_EXPR: 10786 case TRUTH_OR_EXPR: 10787 case TRUTH_XOR_EXPR: 10788 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, 10789 lhs_addr, lhs_var); 10790 /* FALLTHRU */ 10791 case TRUTH_NOT_EXPR: 10792 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, 10793 lhs_addr, lhs_var); 10794 break; 10795 case COMPOUND_EXPR: 10796 /* Break out any preevaluations from cp_build_modify_expr. */ 10797 for (; TREE_CODE (expr) == COMPOUND_EXPR; 10798 expr = TREE_OPERAND (expr, 1)) 10799 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p); 10800 *expr_p = expr; 10801 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var); 10802 default: 10803 break; 10804 } 10805 break; 10806 default: 10807 break; 10808 } 10809 10810 if (saw_lhs == 0) 10811 { 10812 enum gimplify_status gs; 10813 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue); 10814 if (gs != GS_ALL_DONE) 10815 saw_lhs = -1; 10816 } 10817 10818 return saw_lhs; 10819 } 10820 10821 /* Gimplify an OMP_ATOMIC statement. */ 10822 10823 static enum gimplify_status 10824 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p) 10825 { 10826 tree addr = TREE_OPERAND (*expr_p, 0); 10827 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ 10828 ? 
NULL : TREE_OPERAND (*expr_p, 1); 10829 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr))); 10830 tree tmp_load; 10831 gomp_atomic_load *loadstmt; 10832 gomp_atomic_store *storestmt; 10833 10834 tmp_load = create_tmp_reg (type); 10835 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0) 10836 return GS_ERROR; 10837 10838 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue) 10839 != GS_ALL_DONE) 10840 return GS_ERROR; 10841 10842 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr); 10843 gimplify_seq_add_stmt (pre_p, loadstmt); 10844 if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue) 10845 != GS_ALL_DONE) 10846 return GS_ERROR; 10847 10848 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ) 10849 rhs = tmp_load; 10850 storestmt = gimple_build_omp_atomic_store (rhs); 10851 gimplify_seq_add_stmt (pre_p, storestmt); 10852 if (OMP_ATOMIC_SEQ_CST (*expr_p)) 10853 { 10854 gimple_omp_atomic_set_seq_cst (loadstmt); 10855 gimple_omp_atomic_set_seq_cst (storestmt); 10856 } 10857 switch (TREE_CODE (*expr_p)) 10858 { 10859 case OMP_ATOMIC_READ: 10860 case OMP_ATOMIC_CAPTURE_OLD: 10861 *expr_p = tmp_load; 10862 gimple_omp_atomic_set_need_value (loadstmt); 10863 break; 10864 case OMP_ATOMIC_CAPTURE_NEW: 10865 *expr_p = rhs; 10866 gimple_omp_atomic_set_need_value (storestmt); 10867 break; 10868 default: 10869 *expr_p = NULL; 10870 break; 10871 } 10872 10873 return GS_ALL_DONE; 10874 } 10875 10876 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the 10877 body, and adding some EH bits. */ 10878 10879 static enum gimplify_status 10880 gimplify_transaction (tree *expr_p, gimple_seq *pre_p) 10881 { 10882 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr); 10883 gimple *body_stmt; 10884 gtransaction *trans_stmt; 10885 gimple_seq body = NULL; 10886 int subcode = 0; 10887 10888 /* Wrap the transaction body in a BIND_EXPR so we have a context 10889 where to put decls for OMP. */ 10890 if (TREE_CODE (tbody) != BIND_EXPR) 10891 { 10892 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL); 10893 TREE_SIDE_EFFECTS (bind) = 1; 10894 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody)); 10895 TRANSACTION_EXPR_BODY (expr) = bind; 10896 } 10897 10898 push_gimplify_context (); 10899 temp = voidify_wrapper_expr (*expr_p, NULL); 10900 10901 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body); 10902 pop_gimplify_context (body_stmt); 10903 10904 trans_stmt = gimple_build_transaction (body); 10905 if (TRANSACTION_EXPR_OUTER (expr)) 10906 subcode = GTMA_IS_OUTER; 10907 else if (TRANSACTION_EXPR_RELAXED (expr)) 10908 subcode = GTMA_IS_RELAXED; 10909 gimple_transaction_set_subcode (trans_stmt, subcode); 10910 10911 gimplify_seq_add_stmt (pre_p, trans_stmt); 10912 10913 if (temp) 10914 { 10915 *expr_p = temp; 10916 return GS_OK; 10917 } 10918 10919 *expr_p = NULL_TREE; 10920 return GS_ALL_DONE; 10921 } 10922 10923 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY 10924 is the OMP_BODY of the original EXPR (which has already been 10925 gimplified so it's not present in the EXPR). 10926 10927 Return the gimplified GIMPLE_OMP_ORDERED tuple. 
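If the depend(sink)/depend(source) sanity checks below diagnose an error, a GIMPLE_NOP is returned instead, so gimplification can continue past the broken construct.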
*/ 10928 10929 static gimple * 10930 gimplify_omp_ordered (tree expr, gimple_seq body) 10931 { 10932 tree c, decls; 10933 int failures = 0; 10934 unsigned int i; 10935 tree source_c = NULL_TREE; 10936 tree sink_c = NULL_TREE; 10937 10938 if (gimplify_omp_ctxp) 10939 { 10940 for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c)) 10941 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND 10942 && gimplify_omp_ctxp->loop_iter_var.is_empty () 10943 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK 10944 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)) 10945 { 10946 error_at (OMP_CLAUSE_LOCATION (c), 10947 "%<ordered%> construct with %<depend%> clause must be " 10948 "closely nested inside a loop with %<ordered%> clause " 10949 "with a parameter"); 10950 failures++; 10951 } 10952 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND 10953 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK) 10954 { 10955 bool fail = false; 10956 for (decls = OMP_CLAUSE_DECL (c), i = 0; 10957 decls && TREE_CODE (decls) == TREE_LIST; 10958 decls = TREE_CHAIN (decls), ++i) 10959 if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2) 10960 continue; 10961 else if (TREE_VALUE (decls) 10962 != gimplify_omp_ctxp->loop_iter_var[2 * i]) 10963 { 10964 error_at (OMP_CLAUSE_LOCATION (c), 10965 "variable %qE is not an iteration " 10966 "of outermost loop %d, expected %qE", 10967 TREE_VALUE (decls), i + 1, 10968 gimplify_omp_ctxp->loop_iter_var[2 * i]); 10969 fail = true; 10970 failures++; 10971 } 10972 else 10973 TREE_VALUE (decls) 10974 = gimplify_omp_ctxp->loop_iter_var[2 * i + 1]; 10975 if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2) 10976 { 10977 error_at (OMP_CLAUSE_LOCATION (c), 10978 "number of variables in %<depend(sink)%> " 10979 "clause does not match number of " 10980 "iteration variables"); 10981 failures++; 10982 } 10983 sink_c = c; 10984 } 10985 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND 10986 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE) 10987 { 10988 if (source_c) 10989 { 10990 error_at (OMP_CLAUSE_LOCATION (c), 10991 "more than one %<depend(source)%> clause on an " 10992 "%<ordered%> construct"); 10993 failures++; 10994 } 10995 else 10996 source_c = c; 10997 } 10998 } 10999 if (source_c && sink_c) 11000 { 11001 error_at (OMP_CLAUSE_LOCATION (source_c), 11002 "%<depend(source)%> clause specified together with " 11003 "%<depend(sink:)%> clauses on the same construct"); 11004 failures++; 11005 } 11006 11007 if (failures) 11008 return gimple_build_nop (); 11009 return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr)); 11010 } 11011 11012 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the 11013 expression produces a value to be used as an operand inside a GIMPLE 11014 statement, the value will be stored back in *EXPR_P. This value will 11015 be a tree of class tcc_declaration, tcc_constant, tcc_reference or 11016 an SSA_NAME. The corresponding sequence of GIMPLE statements is 11017 emitted in PRE_P and POST_P. 11018 11019 Additionally, this process may overwrite parts of the input 11020 expression during gimplification. Ideally, it should be 11021 possible to do non-destructive gimplification. 11022 11023 EXPR_P points to the GENERIC expression to convert to GIMPLE. If 11024 the expression needs to evaluate to a value to be used as 11025 an operand in a GIMPLE statement, this value will be stored in 11026 *EXPR_P on exit. This happens when the caller specifies one 11027 of fb_lvalue or fb_rvalue fallback flags. 
11028 11029 PRE_P will contain the sequence of GIMPLE statements corresponding 11030 to the evaluation of EXPR and all the side-effects that must 11031 be executed before the main expression. On exit, the last 11032 statement of PRE_P is the core statement being gimplified. For 11033 instance, when gimplifying 'if (++a)' the last statement in 11034 PRE_P will be 'if (t.1)' where t.1 is the result of 11035 pre-incrementing 'a'. 11036 11037 POST_P will contain the sequence of GIMPLE statements corresponding 11038 to the evaluation of all the side-effects that must be executed 11039 after the main expression. If this is NULL, the post 11040 side-effects are stored at the end of PRE_P. 11041 11042 The reason why the output is split in two is to handle post 11043 side-effects explicitly. In some cases, an expression may have 11044 inner and outer post side-effects which need to be emitted in 11045 an order different from the one given by the recursive 11046 traversal. For instance, for the expression (*p--)++ the post 11047 side-effects of '--' must actually occur *after* the post 11048 side-effects of '++'. However, gimplification will first visit 11049 the inner expression, so if a separate POST sequence was not 11050 used, the resulting sequence would be: 11051 11052 1 t.1 = *p 11053 2 p = p - 1 11054 3 t.2 = t.1 + 1 11055 4 *p = t.2 11056 11057 However, the post-decrement operation in line #2 must not be 11058 evaluated until after the store to *p at line #4, so the 11059 correct sequence should be: 11060 11061 1 t.1 = *p 11062 2 t.2 = t.1 + 1 11063 3 *p = t.2 11064 4 p = p - 1 11065 11066 So, by specifying a separate post queue, it is possible 11067 to emit the post side-effects in the correct order. 11068 If POST_P is NULL, an internal queue will be used. Before 11069 returning to the caller, the sequence POST_P is appended to 11070 the main output sequence PRE_P. 11071 11072 GIMPLE_TEST_F points to a function that takes a tree T and 11073 returns nonzero if T is in the GIMPLE form requested by the 11074 caller. The GIMPLE predicates are in gimple.c. 11075 11076 FALLBACK tells the function what sort of a temporary we want if 11077 gimplification cannot produce an expression that complies with 11078 GIMPLE_TEST_F. 11079 11080 fb_none means that no temporary should be generated 11081 fb_rvalue means that an rvalue is OK to generate 11082 fb_lvalue means that an lvalue is OK to generate 11083 fb_either means that either is OK, but an lvalue is preferable. 11084 fb_mayfail means that gimplification may fail (in which case 11085 GS_ERROR will be returned) 11086 11087 The return value is either GS_ERROR or GS_ALL_DONE, since this 11088 function iterates until EXPR is completely gimplified or an error 11089 occurs. */ 11090 11091 enum gimplify_status 11092 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p, 11093 bool (*gimple_test_f) (tree), fallback_t fallback) 11094 { 11095 tree tmp; 11096 gimple_seq internal_pre = NULL; 11097 gimple_seq internal_post = NULL; 11098 tree save_expr; 11099 bool is_statement; 11100 location_t saved_location; 11101 enum gimplify_status ret; 11102 gimple_stmt_iterator pre_last_gsi, post_last_gsi; 11103 tree label; 11104 11105 save_expr = *expr_p; 11106 if (save_expr == NULL_TREE) 11107 return GS_ALL_DONE; 11108 11109 /* If we are gimplifying a top-level statement, PRE_P must be valid. */ 11110 is_statement = gimple_test_f == is_gimple_stmt; 11111 if (is_statement) 11112 gcc_assert (pre_p); 11113 11114 /* Consistency checks. 
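*/

/* An illustration (editor's note, not from the sources): a typical
   recursive use from one of the helpers in this file is

     ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
                          is_gimple_val, fb_rvalue);

   which forces the operand to satisfy is_gimple_val, appending any
   statements needed to evaluate it to *PRE_P.  */

/* First, check that the caller passed a predicate/fallback pair we
   recognize.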
*/ 11115 if (gimple_test_f == is_gimple_reg) 11116 gcc_assert (fallback & (fb_rvalue | fb_lvalue)); 11117 else if (gimple_test_f == is_gimple_val 11118 || gimple_test_f == is_gimple_call_addr 11119 || gimple_test_f == is_gimple_condexpr 11120 || gimple_test_f == is_gimple_mem_rhs 11121 || gimple_test_f == is_gimple_mem_rhs_or_call 11122 || gimple_test_f == is_gimple_reg_rhs 11123 || gimple_test_f == is_gimple_reg_rhs_or_call 11124 || gimple_test_f == is_gimple_asm_val 11125 || gimple_test_f == is_gimple_mem_ref_addr) 11126 gcc_assert (fallback & fb_rvalue); 11127 else if (gimple_test_f == is_gimple_min_lval 11128 || gimple_test_f == is_gimple_lvalue) 11129 gcc_assert (fallback & fb_lvalue); 11130 else if (gimple_test_f == is_gimple_addressable) 11131 gcc_assert (fallback & fb_either); 11132 else if (gimple_test_f == is_gimple_stmt) 11133 gcc_assert (fallback == fb_none); 11134 else 11135 { 11136 /* We should have recognized the GIMPLE_TEST_F predicate to 11137 know what kind of fallback to use in case a temporary is 11138 needed to hold the value or address of *EXPR_P. */ 11139 gcc_unreachable (); 11140 } 11141 11142 /* We used to check the predicate here and return immediately if it 11143 succeeds. This is wrong; the design is for gimplification to be 11144 idempotent, and for the predicates to only test for valid forms, not 11145 whether they are fully simplified. */ 11146 if (pre_p == NULL) 11147 pre_p = &internal_pre; 11148 11149 if (post_p == NULL) 11150 post_p = &internal_post; 11151 11152 /* Remember the last statements added to PRE_P and POST_P. Every 11153 new statement added by the gimplification helpers needs to be 11154 annotated with location information. To centralize the 11155 responsibility, we remember the last statement that had been 11156 added to both queues before gimplifying *EXPR_P. If 11157 gimplification produces new statements in PRE_P and POST_P, those 11158 statements will be annotated with the same location information 11159 as *EXPR_P. */ 11160 pre_last_gsi = gsi_last (*pre_p); 11161 post_last_gsi = gsi_last (*post_p); 11162 11163 saved_location = input_location; 11164 if (save_expr != error_mark_node 11165 && EXPR_HAS_LOCATION (*expr_p)) 11166 input_location = EXPR_LOCATION (*expr_p); 11167 11168 /* Loop over the specific gimplifiers until the toplevel node 11169 remains the same. */ 11170 do 11171 { 11172 /* Strip away as many useless type conversions as possible 11173 at the toplevel. */ 11174 STRIP_USELESS_TYPE_CONVERSION (*expr_p); 11175 11176 /* Remember the expr. */ 11177 save_expr = *expr_p; 11178 11179 /* Die, die, die, my darling. */ 11180 if (save_expr == error_mark_node 11181 || (TREE_TYPE (save_expr) 11182 && TREE_TYPE (save_expr) == error_mark_node)) 11183 { 11184 ret = GS_ERROR; 11185 break; 11186 } 11187 11188 /* Do any language-specific gimplification. */ 11189 ret = ((enum gimplify_status) 11190 lang_hooks.gimplify_expr (expr_p, pre_p, post_p)); 11191 if (ret == GS_OK) 11192 { 11193 if (*expr_p == NULL_TREE) 11194 break; 11195 if (*expr_p != save_expr) 11196 continue; 11197 } 11198 else if (ret != GS_UNHANDLED) 11199 break; 11200 11201 /* Make sure that all the cases set 'ret' appropriately. */ 11202 ret = GS_UNHANDLED; 11203 switch (TREE_CODE (*expr_p)) 11204 { 11205 /* First deal with the special cases. 
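   For instance (editor's sketch), 'b = a++' must capture the old value
   of 'a', so it is lowered roughly to

     t.1 = a;
     b = t.1;
     a = t.1 + 1;

   with the increment carried in the post queue, while a plain 'a++'
   statement simply becomes 'a = a + 1'.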
*/ 11206 11207 case POSTINCREMENT_EXPR: 11208 case POSTDECREMENT_EXPR: 11209 case PREINCREMENT_EXPR: 11210 case PREDECREMENT_EXPR: 11211 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p, 11212 fallback != fb_none, 11213 TREE_TYPE (*expr_p)); 11214 break; 11215 11216 case VIEW_CONVERT_EXPR: 11217 if (is_gimple_reg_type (TREE_TYPE (*expr_p)) 11218 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0)))) 11219 { 11220 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, 11221 post_p, is_gimple_val, fb_rvalue); 11222 recalculate_side_effects (*expr_p); 11223 break; 11224 } 11225 /* Fallthru. */ 11226 11227 case ARRAY_REF: 11228 case ARRAY_RANGE_REF: 11229 case REALPART_EXPR: 11230 case IMAGPART_EXPR: 11231 case COMPONENT_REF: 11232 ret = gimplify_compound_lval (expr_p, pre_p, post_p, 11233 fallback ? fallback : fb_rvalue); 11234 break; 11235 11236 case COND_EXPR: 11237 ret = gimplify_cond_expr (expr_p, pre_p, fallback); 11238 11239 /* C99 code may assign to an array in a structure value of a 11240 conditional expression, and this has undefined behavior 11241 only on execution, so create a temporary if an lvalue is 11242 required. */ 11243 if (fallback == fb_lvalue) 11244 { 11245 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false); 11246 mark_addressable (*expr_p); 11247 ret = GS_OK; 11248 } 11249 break; 11250 11251 case CALL_EXPR: 11252 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none); 11253 11254 /* C99 code may assign to an array in a structure returned 11255 from a function, and this has undefined behavior only on 11256 execution, so create a temporary if an lvalue is 11257 required. */ 11258 if (fallback == fb_lvalue) 11259 { 11260 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false); 11261 mark_addressable (*expr_p); 11262 ret = GS_OK; 11263 } 11264 break; 11265 11266 case TREE_LIST: 11267 gcc_unreachable (); 11268 11269 case COMPOUND_EXPR: 11270 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none); 11271 break; 11272 11273 case COMPOUND_LITERAL_EXPR: 11274 ret = gimplify_compound_literal_expr (expr_p, pre_p, 11275 gimple_test_f, fallback); 11276 break; 11277 11278 case MODIFY_EXPR: 11279 case INIT_EXPR: 11280 ret = gimplify_modify_expr (expr_p, pre_p, post_p, 11281 fallback != fb_none); 11282 break; 11283 11284 case TRUTH_ANDIF_EXPR: 11285 case TRUTH_ORIF_EXPR: 11286 { 11287 /* Preserve the original type of the expression and the 11288 source location of the outer expression. */ 11289 tree org_type = TREE_TYPE (*expr_p); 11290 *expr_p = gimple_boolify (*expr_p); 11291 *expr_p = build3_loc (input_location, COND_EXPR, 11292 org_type, *expr_p, 11293 fold_convert_loc 11294 (input_location, 11295 org_type, boolean_true_node), 11296 fold_convert_loc 11297 (input_location, 11298 org_type, boolean_false_node)); 11299 ret = GS_OK; 11300 break; 11301 } 11302 11303 case TRUTH_NOT_EXPR: 11304 { 11305 tree type = TREE_TYPE (*expr_p); 11306 /* The parsers are careful to generate TRUTH_NOT_EXPR 11307 only with operands that are always zero or one. 11308 We do not fold here but handle the only interesting case 11309 manually, as fold may re-introduce the TRUTH_NOT_EXPR. 
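   Concretely, after boolification !x becomes x ^ 1, or ~x when the
   boolified type has a precision of 1 bit, followed by a conversion
   back to the original type if that conversion is not already useless.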
*/ 11310 *expr_p = gimple_boolify (*expr_p); 11311 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1) 11312 *expr_p = build1_loc (input_location, BIT_NOT_EXPR, 11313 TREE_TYPE (*expr_p), 11314 TREE_OPERAND (*expr_p, 0)); 11315 else 11316 *expr_p = build2_loc (input_location, BIT_XOR_EXPR, 11317 TREE_TYPE (*expr_p), 11318 TREE_OPERAND (*expr_p, 0), 11319 build_int_cst (TREE_TYPE (*expr_p), 1)); 11320 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p))) 11321 *expr_p = fold_convert_loc (input_location, type, *expr_p); 11322 ret = GS_OK; 11323 break; 11324 } 11325 11326 case ADDR_EXPR: 11327 ret = gimplify_addr_expr (expr_p, pre_p, post_p); 11328 break; 11329 11330 case ANNOTATE_EXPR: 11331 { 11332 tree cond = TREE_OPERAND (*expr_p, 0); 11333 tree kind = TREE_OPERAND (*expr_p, 1); 11334 tree type = TREE_TYPE (cond); 11335 if (!INTEGRAL_TYPE_P (type)) 11336 { 11337 *expr_p = cond; 11338 ret = GS_OK; 11339 break; 11340 } 11341 tree tmp = create_tmp_var (type); 11342 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p)); 11343 gcall *call 11344 = gimple_build_call_internal (IFN_ANNOTATE, 2, cond, kind); 11345 gimple_call_set_lhs (call, tmp); 11346 gimplify_seq_add_stmt (pre_p, call); 11347 *expr_p = tmp; 11348 ret = GS_ALL_DONE; 11349 break; 11350 } 11351 11352 case VA_ARG_EXPR: 11353 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p); 11354 break; 11355 11356 CASE_CONVERT: 11357 if (IS_EMPTY_STMT (*expr_p)) 11358 { 11359 ret = GS_ALL_DONE; 11360 break; 11361 } 11362 11363 if (VOID_TYPE_P (TREE_TYPE (*expr_p)) 11364 || fallback == fb_none) 11365 { 11366 /* Just strip a conversion to void (or in void context) and 11367 try again. */ 11368 *expr_p = TREE_OPERAND (*expr_p, 0); 11369 ret = GS_OK; 11370 break; 11371 } 11372 11373 ret = gimplify_conversion (expr_p); 11374 if (ret == GS_ERROR) 11375 break; 11376 if (*expr_p != save_expr) 11377 break; 11378 /* FALLTHRU */ 11379 11380 case FIX_TRUNC_EXPR: 11381 /* unary_expr: ... | '(' cast ')' val | ... */ 11382 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p, 11383 is_gimple_val, fb_rvalue); 11384 recalculate_side_effects (*expr_p); 11385 break; 11386 11387 case INDIRECT_REF: 11388 { 11389 bool volatilep = TREE_THIS_VOLATILE (*expr_p); 11390 bool notrap = TREE_THIS_NOTRAP (*expr_p); 11391 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0)); 11392 11393 *expr_p = fold_indirect_ref_loc (input_location, *expr_p); 11394 if (*expr_p != save_expr) 11395 { 11396 ret = GS_OK; 11397 break; 11398 } 11399 11400 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p, 11401 is_gimple_reg, fb_rvalue); 11402 if (ret == GS_ERROR) 11403 break; 11404 11405 recalculate_side_effects (*expr_p); 11406 *expr_p = fold_build2_loc (input_location, MEM_REF, 11407 TREE_TYPE (*expr_p), 11408 TREE_OPERAND (*expr_p, 0), 11409 build_int_cst (saved_ptr_type, 0)); 11410 TREE_THIS_VOLATILE (*expr_p) = volatilep; 11411 TREE_THIS_NOTRAP (*expr_p) = notrap; 11412 ret = GS_OK; 11413 break; 11414 } 11415 11416 /* We arrive here through the various re-gimplification paths. */ 11417 case MEM_REF: 11418 /* First try re-folding the whole thing. */ 11419 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p), 11420 TREE_OPERAND (*expr_p, 0), 11421 TREE_OPERAND (*expr_p, 1)); 11422 if (tmp) 11423 { 11424 REF_REVERSE_STORAGE_ORDER (tmp) 11425 = REF_REVERSE_STORAGE_ORDER (*expr_p); 11426 *expr_p = tmp; 11427 recalculate_side_effects (*expr_p); 11428 ret = GS_OK; 11429 break; 11430 } 11431 /* Avoid re-gimplifying the address operand if it is already 11432 in suitable form.
Re-gimplifying would mark the address 11433 operand addressable. Always gimplify when not in SSA form 11434 as we still may have to gimplify decls with value-exprs. */ 11435 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun) 11436 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0))) 11437 { 11438 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p, 11439 is_gimple_mem_ref_addr, fb_rvalue); 11440 if (ret == GS_ERROR) 11441 break; 11442 } 11443 recalculate_side_effects (*expr_p); 11444 ret = GS_ALL_DONE; 11445 break; 11446 11447 /* Constants need not be gimplified. */ 11448 case INTEGER_CST: 11449 case REAL_CST: 11450 case FIXED_CST: 11451 case STRING_CST: 11452 case COMPLEX_CST: 11453 case VECTOR_CST: 11454 /* Drop the overflow flag on constants; we do not want 11455 that in the GIMPLE IL. */ 11456 if (TREE_OVERFLOW_P (*expr_p)) 11457 *expr_p = drop_tree_overflow (*expr_p); 11458 ret = GS_ALL_DONE; 11459 break; 11460 11461 case CONST_DECL: 11462 /* If we require an lvalue, such as for ADDR_EXPR, retain the 11463 CONST_DECL node. Otherwise the decl is replaceable by its 11464 value. */ 11465 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */ 11466 if (fallback & fb_lvalue) 11467 ret = GS_ALL_DONE; 11468 else 11469 { 11470 *expr_p = DECL_INITIAL (*expr_p); 11471 ret = GS_OK; 11472 } 11473 break; 11474 11475 case DECL_EXPR: 11476 ret = gimplify_decl_expr (expr_p, pre_p); 11477 break; 11478 11479 case BIND_EXPR: 11480 ret = gimplify_bind_expr (expr_p, pre_p); 11481 break; 11482 11483 case LOOP_EXPR: 11484 ret = gimplify_loop_expr (expr_p, pre_p); 11485 break; 11486 11487 case SWITCH_EXPR: 11488 ret = gimplify_switch_expr (expr_p, pre_p); 11489 break; 11490 11491 case EXIT_EXPR: 11492 ret = gimplify_exit_expr (expr_p); 11493 break; 11494 11495 case GOTO_EXPR: 11496 /* If the target is not a LABEL_DECL, then it is a computed jump 11497 and the target needs to be gimplified. */ 11498 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL) 11499 { 11500 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p, 11501 NULL, is_gimple_val, fb_rvalue); 11502 if (ret == GS_ERROR) 11503 break; 11504 } 11505 gimplify_seq_add_stmt (pre_p, 11506 gimple_build_goto (GOTO_DESTINATION (*expr_p))); 11507 ret = GS_ALL_DONE; 11508 break; 11509 11510 case PREDICT_EXPR: 11511 gimplify_seq_add_stmt (pre_p, 11512 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p), 11513 PREDICT_EXPR_OUTCOME (*expr_p))); 11514 ret = GS_ALL_DONE; 11515 break; 11516 11517 case LABEL_EXPR: 11518 ret = gimplify_label_expr (expr_p, pre_p); 11519 label = LABEL_EXPR_LABEL (*expr_p); 11520 gcc_assert (decl_function_context (label) == current_function_decl); 11521 11522 /* If the label is used in a goto statement, or the address of the 11523 label is taken, we need to unpoison all variables that were seen 11524 so far. Doing so prevents us from reporting false positives. */ 11525 if (asan_poisoned_variables 11526 && asan_used_labels != NULL 11527 && asan_used_labels->contains (label)) 11528 asan_poison_variables (asan_poisoned_variables, false, pre_p); 11529 break; 11530 11531 case CASE_LABEL_EXPR: 11532 ret = gimplify_case_label_expr (expr_p, pre_p); 11533 11534 if (gimplify_ctxp->live_switch_vars) 11535 asan_poison_variables (gimplify_ctxp->live_switch_vars, false, 11536 pre_p); 11537 break; 11538 11539 case RETURN_EXPR: 11540 ret = gimplify_return_expr (*expr_p, pre_p); 11541 break; 11542 11543 case CONSTRUCTOR: 11544 /* Don't reduce this in place; let gimplify_init_constructor work its 11545 magic.
But if we're just elaborating this for side effects, 11546 gimplify any element that has side-effects. */ 11547 if (fallback == fb_none) 11548 { 11549 unsigned HOST_WIDE_INT ix; 11550 tree val; 11551 tree temp = NULL_TREE; 11552 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val) 11553 if (TREE_SIDE_EFFECTS (val)) 11554 append_to_statement_list (val, &temp); 11555 11556 *expr_p = temp; 11557 ret = temp ? GS_OK : GS_ALL_DONE; 11558 } 11559 /* C99 code may assign to an array in a constructed 11560 structure or union, and this has undefined behavior only 11561 on execution, so create a temporary if an lvalue is 11562 required. */ 11563 else if (fallback == fb_lvalue) 11564 { 11565 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false); 11566 mark_addressable (*expr_p); 11567 ret = GS_OK; 11568 } 11569 else 11570 ret = GS_ALL_DONE; 11571 break; 11572 11573 /* The following are special cases that are not handled by the 11574 original GIMPLE grammar. */ 11575 11576 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and 11577 eliminated. */ 11578 case SAVE_EXPR: 11579 ret = gimplify_save_expr (expr_p, pre_p, post_p); 11580 break; 11581 11582 case BIT_FIELD_REF: 11583 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, 11584 post_p, is_gimple_lvalue, fb_either); 11585 recalculate_side_effects (*expr_p); 11586 break; 11587 11588 case TARGET_MEM_REF: 11589 { 11590 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE; 11591 11592 if (TMR_BASE (*expr_p)) 11593 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p, 11594 post_p, is_gimple_mem_ref_addr, fb_either); 11595 if (TMR_INDEX (*expr_p)) 11596 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p, 11597 post_p, is_gimple_val, fb_rvalue); 11598 if (TMR_INDEX2 (*expr_p)) 11599 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p, 11600 post_p, is_gimple_val, fb_rvalue); 11601 /* TMR_STEP and TMR_OFFSET are always integer constants. */ 11602 ret = MIN (r0, r1); 11603 } 11604 break; 11605 11606 case NON_LVALUE_EXPR: 11607 /* This should have been stripped above. */ 11608 gcc_unreachable (); 11609 11610 case ASM_EXPR: 11611 ret = gimplify_asm_expr (expr_p, pre_p, post_p); 11612 break; 11613 11614 case TRY_FINALLY_EXPR: 11615 case TRY_CATCH_EXPR: 11616 { 11617 gimple_seq eval, cleanup; 11618 gtry *try_; 11619 11620 /* Calls to destructors are generated automatically in the FINALLY/CATCH 11621 block. They should have UNKNOWN_LOCATION as their location. However, 11622 gimplify_call_expr will reset these call stmts to input_location 11623 if it finds a stmt's location is unknown. To prevent this resetting 11624 for destructors, we set input_location to UNKNOWN_LOCATION here. 11625 Note that this only affects the destructor calls in the FINALLY/CATCH 11626 block, and input_location will automatically be reset to its original 11627 value by the end of gimplify_expr. */ 11628 input_location = UNKNOWN_LOCATION; 11629 eval = cleanup = NULL; 11630 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval); 11631 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup); 11632 /* Don't create bogus GIMPLE_TRY with empty cleanup. */ 11633 if (gimple_seq_empty_p (cleanup)) 11634 { 11635 gimple_seq_add_seq (pre_p, eval); 11636 ret = GS_ALL_DONE; 11637 break; 11638 } 11639 try_ = gimple_build_try (eval, cleanup, 11640 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR 11641 ?
GIMPLE_TRY_FINALLY 11642 : GIMPLE_TRY_CATCH); 11643 if (EXPR_HAS_LOCATION (save_expr)) 11644 gimple_set_location (try_, EXPR_LOCATION (save_expr)); 11645 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION) 11646 gimple_set_location (try_, saved_location); 11647 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR) 11648 gimple_try_set_catch_is_cleanup (try_, 11649 TRY_CATCH_IS_CLEANUP (*expr_p)); 11650 gimplify_seq_add_stmt (pre_p, try_); 11651 ret = GS_ALL_DONE; 11652 break; 11653 } 11654 11655 case CLEANUP_POINT_EXPR: 11656 ret = gimplify_cleanup_point_expr (expr_p, pre_p); 11657 break; 11658 11659 case TARGET_EXPR: 11660 ret = gimplify_target_expr (expr_p, pre_p, post_p); 11661 break; 11662 11663 case CATCH_EXPR: 11664 { 11665 gimple *c; 11666 gimple_seq handler = NULL; 11667 gimplify_and_add (CATCH_BODY (*expr_p), &handler); 11668 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler); 11669 gimplify_seq_add_stmt (pre_p, c); 11670 ret = GS_ALL_DONE; 11671 break; 11672 } 11673 11674 case EH_FILTER_EXPR: 11675 { 11676 gimple *ehf; 11677 gimple_seq failure = NULL; 11678 11679 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure); 11680 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure); 11681 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p)); 11682 gimplify_seq_add_stmt (pre_p, ehf); 11683 ret = GS_ALL_DONE; 11684 break; 11685 } 11686 11687 case OBJ_TYPE_REF: 11688 { 11689 enum gimplify_status r0, r1; 11690 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p, 11691 post_p, is_gimple_val, fb_rvalue); 11692 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p, 11693 post_p, is_gimple_val, fb_rvalue); 11694 TREE_SIDE_EFFECTS (*expr_p) = 0; 11695 ret = MIN (r0, r1); 11696 } 11697 break; 11698 11699 case LABEL_DECL: 11700 /* We get here when taking the address of a label. We mark 11701 the label as "forced"; meaning it can never be removed and 11702 it is a potential target for any computed goto. */ 11703 FORCED_LABEL (*expr_p) = 1; 11704 ret = GS_ALL_DONE; 11705 break; 11706 11707 case STATEMENT_LIST: 11708 ret = gimplify_statement_list (expr_p, pre_p); 11709 break; 11710 11711 case WITH_SIZE_EXPR: 11712 { 11713 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, 11714 post_p == &internal_post ? NULL : post_p, 11715 gimple_test_f, fallback); 11716 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p, 11717 is_gimple_val, fb_rvalue); 11718 ret = GS_ALL_DONE; 11719 } 11720 break; 11721 11722 case VAR_DECL: 11723 case PARM_DECL: 11724 ret = gimplify_var_or_parm_decl (expr_p); 11725 break; 11726 11727 case RESULT_DECL: 11728 /* When within an OMP context, notice uses of variables. */ 11729 if (gimplify_omp_ctxp) 11730 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true); 11731 ret = GS_ALL_DONE; 11732 break; 11733 11734 case SSA_NAME: 11735 /* Allow callbacks into the gimplifier during optimization. 
*/ 11736 ret = GS_ALL_DONE; 11737 break; 11738 11739 case OMP_PARALLEL: 11740 gimplify_omp_parallel (expr_p, pre_p); 11741 ret = GS_ALL_DONE; 11742 break; 11743 11744 case OMP_TASK: 11745 gimplify_omp_task (expr_p, pre_p); 11746 ret = GS_ALL_DONE; 11747 break; 11748 11749 case OMP_FOR: 11750 case OMP_SIMD: 11751 case CILK_SIMD: 11752 case CILK_FOR: 11753 case OMP_DISTRIBUTE: 11754 case OMP_TASKLOOP: 11755 case OACC_LOOP: 11756 ret = gimplify_omp_for (expr_p, pre_p); 11757 break; 11758 11759 case OACC_CACHE: 11760 gimplify_oacc_cache (expr_p, pre_p); 11761 ret = GS_ALL_DONE; 11762 break; 11763 11764 case OACC_DECLARE: 11765 gimplify_oacc_declare (expr_p, pre_p); 11766 ret = GS_ALL_DONE; 11767 break; 11768 11769 case OACC_HOST_DATA: 11770 case OACC_DATA: 11771 case OACC_KERNELS: 11772 case OACC_PARALLEL: 11773 case OMP_SECTIONS: 11774 case OMP_SINGLE: 11775 case OMP_TARGET: 11776 case OMP_TARGET_DATA: 11777 case OMP_TEAMS: 11778 gimplify_omp_workshare (expr_p, pre_p); 11779 ret = GS_ALL_DONE; 11780 break; 11781 11782 case OACC_ENTER_DATA: 11783 case OACC_EXIT_DATA: 11784 case OACC_UPDATE: 11785 case OMP_TARGET_UPDATE: 11786 case OMP_TARGET_ENTER_DATA: 11787 case OMP_TARGET_EXIT_DATA: 11788 gimplify_omp_target_update (expr_p, pre_p); 11789 ret = GS_ALL_DONE; 11790 break; 11791 11792 case OMP_SECTION: 11793 case OMP_MASTER: 11794 case OMP_TASKGROUP: 11795 case OMP_ORDERED: 11796 case OMP_CRITICAL: 11797 { 11798 gimple_seq body = NULL; 11799 gimple *g; 11800 11801 gimplify_and_add (OMP_BODY (*expr_p), &body); 11802 switch (TREE_CODE (*expr_p)) 11803 { 11804 case OMP_SECTION: 11805 g = gimple_build_omp_section (body); 11806 break; 11807 case OMP_MASTER: 11808 g = gimple_build_omp_master (body); 11809 break; 11810 case OMP_TASKGROUP: 11811 { 11812 gimple_seq cleanup = NULL; 11813 tree fn 11814 = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END); 11815 g = gimple_build_call (fn, 0); 11816 gimple_seq_add_stmt (&cleanup, g); 11817 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY); 11818 body = NULL; 11819 gimple_seq_add_stmt (&body, g); 11820 g = gimple_build_omp_taskgroup (body); 11821 } 11822 break; 11823 case OMP_ORDERED: 11824 g = gimplify_omp_ordered (*expr_p, body); 11825 break; 11826 case OMP_CRITICAL: 11827 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p), 11828 pre_p, ORT_WORKSHARE, OMP_CRITICAL); 11829 gimplify_adjust_omp_clauses (pre_p, body, 11830 &OMP_CRITICAL_CLAUSES (*expr_p), 11831 OMP_CRITICAL); 11832 g = gimple_build_omp_critical (body, 11833 OMP_CRITICAL_NAME (*expr_p), 11834 OMP_CRITICAL_CLAUSES (*expr_p)); 11835 break; 11836 default: 11837 gcc_unreachable (); 11838 } 11839 gimplify_seq_add_stmt (pre_p, g); 11840 ret = GS_ALL_DONE; 11841 break; 11842 } 11843 11844 case OMP_ATOMIC: 11845 case OMP_ATOMIC_READ: 11846 case OMP_ATOMIC_CAPTURE_OLD: 11847 case OMP_ATOMIC_CAPTURE_NEW: 11848 ret = gimplify_omp_atomic (expr_p, pre_p); 11849 break; 11850 11851 case TRANSACTION_EXPR: 11852 ret = gimplify_transaction (expr_p, pre_p); 11853 break; 11854 11855 case TRUTH_AND_EXPR: 11856 case TRUTH_OR_EXPR: 11857 case TRUTH_XOR_EXPR: 11858 { 11859 tree orig_type = TREE_TYPE (*expr_p); 11860 tree new_type, xop0, xop1; 11861 *expr_p = gimple_boolify (*expr_p); 11862 new_type = TREE_TYPE (*expr_p); 11863 if (!useless_type_conversion_p (orig_type, new_type)) 11864 { 11865 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p); 11866 ret = GS_OK; 11867 break; 11868 } 11869 11870 /* Boolified binary truth expressions are semantically equivalent 11871 to bitwise binary 
expressions. Canonicalize them to the 11872 bitwise variant. */ 11873 switch (TREE_CODE (*expr_p)) 11874 { 11875 case TRUTH_AND_EXPR: 11876 TREE_SET_CODE (*expr_p, BIT_AND_EXPR); 11877 break; 11878 case TRUTH_OR_EXPR: 11879 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR); 11880 break; 11881 case TRUTH_XOR_EXPR: 11882 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR); 11883 break; 11884 default: 11885 break; 11886 } 11887 /* Now make sure that operands have compatible type to 11888 expression's new_type. */ 11889 xop0 = TREE_OPERAND (*expr_p, 0); 11890 xop1 = TREE_OPERAND (*expr_p, 1); 11891 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0))) 11892 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location, 11893 new_type, 11894 xop0); 11895 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1))) 11896 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location, 11897 new_type, 11898 xop1); 11899 /* Continue classified as tcc_binary. */ 11900 goto expr_2; 11901 } 11902 11903 case VEC_COND_EXPR: 11904 { 11905 enum gimplify_status r0, r1, r2; 11906 11907 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, 11908 post_p, is_gimple_condexpr, fb_rvalue); 11909 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, 11910 post_p, is_gimple_val, fb_rvalue); 11911 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p, 11912 post_p, is_gimple_val, fb_rvalue); 11913 11914 ret = MIN (MIN (r0, r1), r2); 11915 recalculate_side_effects (*expr_p); 11916 } 11917 break; 11918 11919 case FMA_EXPR: 11920 case VEC_PERM_EXPR: 11921 /* Classified as tcc_expression. */ 11922 goto expr_3; 11923 11924 case BIT_INSERT_EXPR: 11925 /* Argument 3 is a constant. */ 11926 goto expr_2; 11927 11928 case POINTER_PLUS_EXPR: 11929 { 11930 enum gimplify_status r0, r1; 11931 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, 11932 post_p, is_gimple_val, fb_rvalue); 11933 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, 11934 post_p, is_gimple_val, fb_rvalue); 11935 recalculate_side_effects (*expr_p); 11936 ret = MIN (r0, r1); 11937 break; 11938 } 11939 11940 case CILK_SYNC_STMT: 11941 { 11942 if (!fn_contains_cilk_spawn_p (cfun)) 11943 { 11944 error_at (EXPR_LOCATION (*expr_p), 11945 "expected %<_Cilk_spawn%> before %<_Cilk_sync%>"); 11946 ret = GS_ERROR; 11947 } 11948 else 11949 { 11950 gimplify_cilk_sync (expr_p, pre_p); 11951 ret = GS_ALL_DONE; 11952 } 11953 break; 11954 } 11955 11956 default: 11957 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p))) 11958 { 11959 case tcc_comparison: 11960 /* Handle comparison of objects of non scalar mode aggregates 11961 with a call to memcmp. It would be nice to only have to do 11962 this for variable-sized objects, but then we'd have to allow 11963 the same nest of reference nodes we allow for MODIFY_EXPR and 11964 that's too complex. 11965 11966 Compare scalar mode aggregates as scalar mode values. Using 11967 memcmp for them would be very inefficient at best, and is 11968 plain wrong if bitfields are involved. */ 11969 { 11970 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1)); 11971 11972 /* Vector comparisons need no boolification. 
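   A GENERIC vector comparison already yields a vector of -1/0 element
   masks rather than a single truth value, so forcing it through
   gimple_boolify would be wrong.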
*/ 11973 if (TREE_CODE (type) == VECTOR_TYPE) 11974 goto expr_2; 11975 else if (!AGGREGATE_TYPE_P (type)) 11976 { 11977 tree org_type = TREE_TYPE (*expr_p); 11978 *expr_p = gimple_boolify (*expr_p); 11979 if (!useless_type_conversion_p (org_type, 11980 TREE_TYPE (*expr_p))) 11981 { 11982 *expr_p = fold_convert_loc (input_location, 11983 org_type, *expr_p); 11984 ret = GS_OK; 11985 } 11986 else 11987 goto expr_2; 11988 } 11989 else if (TYPE_MODE (type) != BLKmode) 11990 ret = gimplify_scalar_mode_aggregate_compare (expr_p); 11991 else 11992 ret = gimplify_variable_sized_compare (expr_p); 11993 11994 break; 11995 } 11996 11997 /* If *EXPR_P does not need to be special-cased, handle it 11998 according to its class. */ 11999 case tcc_unary: 12000 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, 12001 post_p, is_gimple_val, fb_rvalue); 12002 break; 12003 12004 case tcc_binary: 12005 expr_2: 12006 { 12007 enum gimplify_status r0, r1; 12008 12009 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, 12010 post_p, is_gimple_val, fb_rvalue); 12011 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, 12012 post_p, is_gimple_val, fb_rvalue); 12013 12014 ret = MIN (r0, r1); 12015 break; 12016 } 12017 12018 expr_3: 12019 { 12020 enum gimplify_status r0, r1, r2; 12021 12022 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, 12023 post_p, is_gimple_val, fb_rvalue); 12024 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, 12025 post_p, is_gimple_val, fb_rvalue); 12026 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p, 12027 post_p, is_gimple_val, fb_rvalue); 12028 12029 ret = MIN (MIN (r0, r1), r2); 12030 break; 12031 } 12032 12033 case tcc_declaration: 12034 case tcc_constant: 12035 ret = GS_ALL_DONE; 12036 goto dont_recalculate; 12037 12038 default: 12039 gcc_unreachable (); 12040 } 12041 12042 recalculate_side_effects (*expr_p); 12043 12044 dont_recalculate: 12045 break; 12046 } 12047 12048 gcc_assert (*expr_p || ret != GS_OK); 12049 } 12050 while (ret == GS_OK); 12051 12052 /* If we encountered an error_mark somewhere nested inside, either 12053 stub out the statement or propagate the error back out. */ 12054 if (ret == GS_ERROR) 12055 { 12056 if (is_statement) 12057 *expr_p = NULL; 12058 goto out; 12059 } 12060 12061 /* This was only valid as a return value from the langhook, which 12062 we handled. Make sure it doesn't escape from any other context. */ 12063 gcc_assert (ret != GS_UNHANDLED); 12064 12065 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p)) 12066 { 12067 /* We aren't looking for a value, and we don't have a valid 12068 statement. If it doesn't have side-effects, throw it away. 12069 We can also get here with code such as "*&&L;", where L is 12070 a LABEL_DECL that is marked as FORCED_LABEL. */ 12071 if (TREE_CODE (*expr_p) == LABEL_DECL 12072 || !TREE_SIDE_EFFECTS (*expr_p)) 12073 *expr_p = NULL; 12074 else if (!TREE_THIS_VOLATILE (*expr_p)) 12075 { 12076 /* This is probably a _REF that contains something nested that 12077 has side effects. Recurse through the operands to find it. 
*/ 12078 enum tree_code code = TREE_CODE (*expr_p); 12079 12080 switch (code) 12081 { 12082 case COMPONENT_REF: 12083 case REALPART_EXPR: 12084 case IMAGPART_EXPR: 12085 case VIEW_CONVERT_EXPR: 12086 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p, 12087 gimple_test_f, fallback); 12088 break; 12089 12090 case ARRAY_REF: 12091 case ARRAY_RANGE_REF: 12092 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p, 12093 gimple_test_f, fallback); 12094 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p, 12095 gimple_test_f, fallback); 12096 break; 12097 12098 default: 12099 /* Anything else with side-effects must be converted to 12100 a valid statement before we get here. */ 12101 gcc_unreachable (); 12102 } 12103 12104 *expr_p = NULL; 12105 } 12106 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p)) 12107 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode) 12108 { 12109 /* Historically, the compiler has treated a bare reference 12110 to a non-BLKmode volatile lvalue as forcing a load. */ 12111 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p)); 12112 12113 /* Normally, we do not want to create a temporary for a 12114 TREE_ADDRESSABLE type because such a type should not be 12115 copied by bitwise-assignment. However, we make an 12116 exception here, as all we are doing here is ensuring that 12117 we read the bytes that make up the type. We use 12118 create_tmp_var_raw because create_tmp_var will abort when 12119 given a TREE_ADDRESSABLE type. */ 12120 tree tmp = create_tmp_var_raw (type, "vol"); 12121 gimple_add_tmp_var (tmp); 12122 gimplify_assign (tmp, *expr_p, pre_p); 12123 *expr_p = NULL; 12124 } 12125 else 12126 /* We can't do anything useful with a volatile reference to 12127 an incomplete type, so just throw it away. Likewise for 12128 a BLKmode type, since any implicit inner load should 12129 already have been turned into an explicit one by the 12130 gimplification process. */ 12131 *expr_p = NULL; 12132 } 12133 12134 /* If we are gimplifying at the statement level, we're done. Tack 12135 everything together and return. */ 12136 if (fallback == fb_none || is_statement) 12137 { 12138 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear 12139 it out for GC to reclaim it. */ 12140 *expr_p = NULL_TREE; 12141 12142 if (!gimple_seq_empty_p (internal_pre) 12143 || !gimple_seq_empty_p (internal_post)) 12144 { 12145 gimplify_seq_add_seq (&internal_pre, internal_post); 12146 gimplify_seq_add_seq (pre_p, internal_pre); 12147 } 12148 12149 /* The result of gimplifying *EXPR_P is going to be the last few 12150 statements in *PRE_P and *POST_P. Add location information 12151 to all the statements that were added by the gimplification 12152 helpers. */ 12153 if (!gimple_seq_empty_p (*pre_p)) 12154 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location); 12155 12156 if (!gimple_seq_empty_p (*post_p)) 12157 annotate_all_with_location_after (*post_p, post_last_gsi, 12158 input_location); 12159 12160 goto out; 12161 } 12162 12163 #ifdef ENABLE_GIMPLE_CHECKING 12164 if (*expr_p) 12165 { 12166 enum tree_code code = TREE_CODE (*expr_p); 12167 /* These expressions should already be in gimple IR form. 
*/ 12168 gcc_assert (code != MODIFY_EXPR 12169 && code != ASM_EXPR 12170 && code != BIND_EXPR 12171 && code != CATCH_EXPR 12172 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr) 12173 && code != EH_FILTER_EXPR 12174 && code != GOTO_EXPR 12175 && code != LABEL_EXPR 12176 && code != LOOP_EXPR 12177 && code != SWITCH_EXPR 12178 && code != TRY_FINALLY_EXPR 12179 && code != OACC_PARALLEL 12180 && code != OACC_KERNELS 12181 && code != OACC_DATA 12182 && code != OACC_HOST_DATA 12183 && code != OACC_DECLARE 12184 && code != OACC_UPDATE 12185 && code != OACC_ENTER_DATA 12186 && code != OACC_EXIT_DATA 12187 && code != OACC_CACHE 12188 && code != OMP_CRITICAL 12189 && code != OMP_FOR 12190 && code != OACC_LOOP 12191 && code != OMP_MASTER 12192 && code != OMP_TASKGROUP 12193 && code != OMP_ORDERED 12194 && code != OMP_PARALLEL 12195 && code != OMP_SECTIONS 12196 && code != OMP_SECTION 12197 && code != OMP_SINGLE); 12198 } 12199 #endif 12200 12201 /* Otherwise we're gimplifying a subexpression, so the resulting 12202 value is interesting. If it's a valid operand that matches 12203 GIMPLE_TEST_F, we're done. Unless we are handling some 12204 post-effects internally; if that's the case, we need to copy into 12205 a temporary before adding the post-effects to POST_P. */ 12206 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p)) 12207 goto out; 12208 12209 /* Otherwise, we need to create a new temporary for the gimplified 12210 expression. */ 12211 12212 /* We can't return an lvalue if we have an internal postqueue. The 12213 object the lvalue refers to would (probably) be modified by the 12214 postqueue; we need to copy the value out first, which means an 12215 rvalue. */ 12216 if ((fallback & fb_lvalue) 12217 && gimple_seq_empty_p (internal_post) 12218 && is_gimple_addressable (*expr_p)) 12219 { 12220 /* An lvalue will do. Take the address of the expression, store it 12221 in a temporary, and replace the expression with an INDIRECT_REF of 12222 that temporary. */ 12223 tmp = build_fold_addr_expr_loc (input_location, *expr_p); 12224 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue); 12225 *expr_p = build_simple_mem_ref (tmp); 12226 } 12227 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p)) 12228 { 12229 /* An rvalue will do. Assign the gimplified expression into a 12230 new temporary TMP and replace the original expression with 12231 TMP. First, make sure that the expression has a type so that 12232 it can be assigned into a temporary. */ 12233 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p))); 12234 *expr_p = get_formal_tmp_var (*expr_p, pre_p); 12235 } 12236 else 12237 { 12238 #ifdef ENABLE_GIMPLE_CHECKING 12239 if (!(fallback & fb_mayfail)) 12240 { 12241 fprintf (stderr, "gimplification failed:\n"); 12242 print_generic_expr (stderr, *expr_p, 0); 12243 debug_tree (*expr_p); 12244 internal_error ("gimplification failed"); 12245 } 12246 #endif 12247 gcc_assert (fallback & fb_mayfail); 12248 12249 /* If this is an asm statement, and the user asked for the 12250 impossible, don't die. Fail and let gimplify_asm_expr 12251 issue an error. */ 12252 ret = GS_ERROR; 12253 goto out; 12254 } 12255 12256 /* Make sure the temporary matches our predicate. 
*/ 12257 gcc_assert ((*gimple_test_f) (*expr_p)); 12258 12259 if (!gimple_seq_empty_p (internal_post)) 12260 { 12261 annotate_all_with_location (internal_post, input_location); 12262 gimplify_seq_add_seq (pre_p, internal_post); 12263 } 12264 12265 out: 12266 input_location = saved_location; 12267 return ret; 12268 } 12269 12270 /* Like gimplify_expr but make sure the gimplified result is not itself 12271 an SSA name (a decl is used instead in that case). Temporaries required 12272 by evaluating *EXPR_P may still be SSA names. */ 12273 12274 static enum gimplify_status 12275 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p, 12276 bool (*gimple_test_f) (tree), fallback_t fallback, 12277 bool allow_ssa) 12278 { 12279 bool was_ssa_name_p = TREE_CODE (*expr_p) == SSA_NAME; 12280 enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p, 12281 gimple_test_f, fallback); 12282 if (! allow_ssa 12283 && TREE_CODE (*expr_p) == SSA_NAME) 12284 { 12285 tree name = *expr_p; 12286 if (was_ssa_name_p) 12287 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false); 12288 else 12289 { 12290 /* Avoid the extra copy if possible. */ 12291 *expr_p = create_tmp_reg (TREE_TYPE (name)); 12292 gimple_set_lhs (SSA_NAME_DEF_STMT (name), *expr_p); 12293 release_ssa_name (name); 12294 } 12295 } 12296 return ret; 12297 } 12298 12299 /* Look through TYPE for variable-sized objects and gimplify each such 12300 size that we find. Add to LIST_P any statements generated. */ 12301 12302 void 12303 gimplify_type_sizes (tree type, gimple_seq *list_p) 12304 { 12305 tree field, t; 12306 12307 if (type == NULL || type == error_mark_node) 12308 return; 12309 12310 /* We first do the main variant, then copy into any other variants. */ 12311 type = TYPE_MAIN_VARIANT (type); 12312 12313 /* Avoid infinite recursion. */ 12314 if (TYPE_SIZES_GIMPLIFIED (type)) 12315 return; 12316 12317 TYPE_SIZES_GIMPLIFIED (type) = 1; 12318 12319 switch (TREE_CODE (type)) 12320 { 12321 case INTEGER_TYPE: 12322 case ENUMERAL_TYPE: 12323 case BOOLEAN_TYPE: 12324 case REAL_TYPE: 12325 case FIXED_POINT_TYPE: 12326 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p); 12327 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p); 12328 12329 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t)) 12330 { 12331 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type); 12332 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type); 12333 } 12334 break; 12335 12336 case ARRAY_TYPE: 12337 /* These types may not have declarations, so handle them here. */ 12338 gimplify_type_sizes (TREE_TYPE (type), list_p); 12339 gimplify_type_sizes (TYPE_DOMAIN (type), list_p); 12340 /* Ensure VLA bounds aren't removed; at -O0 they should be variables 12341 with assigned stack slots, and at -O1+ with -g they should be 12342 tracked by VTA.
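   For a VLA such as 'int a[n]', for example, the domain bound and the
   size expressions involving 'n' have been gimplified into artificial
   variables; clearing DECL_IGNORED_P below keeps those variables
   visible to the debugger.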
*/ 12343 if (!(TYPE_NAME (type) 12344 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL 12345 && DECL_IGNORED_P (TYPE_NAME (type))) 12346 && TYPE_DOMAIN (type) 12347 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type))) 12348 { 12349 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type)); 12350 if (t && VAR_P (t) && DECL_ARTIFICIAL (t)) 12351 DECL_IGNORED_P (t) = 0; 12352 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type)); 12353 if (t && VAR_P (t) && DECL_ARTIFICIAL (t)) 12354 DECL_IGNORED_P (t) = 0; 12355 } 12356 break; 12357 12358 case RECORD_TYPE: 12359 case UNION_TYPE: 12360 case QUAL_UNION_TYPE: 12361 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field)) 12362 if (TREE_CODE (field) == FIELD_DECL) 12363 { 12364 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p); 12365 gimplify_one_sizepos (&DECL_SIZE (field), list_p); 12366 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p); 12367 gimplify_type_sizes (TREE_TYPE (field), list_p); 12368 } 12369 break; 12370 12371 case POINTER_TYPE: 12372 case REFERENCE_TYPE: 12373 /* We used to recurse on the pointed-to type here, which turned out to 12374 be incorrect because its definition might refer to variables not 12375 yet initialized at this point if a forward declaration is involved. 12376 12377 It was actually useful for anonymous pointed-to types to ensure 12378 that the sizes evaluation dominates every possible later use of the 12379 values. Restricting to such types here would be safe since there 12380 is no possible forward declaration around, but would introduce an 12381 undesirable middle-end semantic to anonymity. We then defer to 12382 front-ends the responsibility of ensuring that the sizes are 12383 evaluated both early and late enough, e.g. by attaching artificial 12384 type declarations to the tree. */ 12385 break; 12386 12387 default: 12388 break; 12389 } 12390 12391 gimplify_one_sizepos (&TYPE_SIZE (type), list_p); 12392 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p); 12393 12394 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t)) 12395 { 12396 TYPE_SIZE (t) = TYPE_SIZE (type); 12397 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type); 12398 TYPE_SIZES_GIMPLIFIED (t) = 1; 12399 } 12400 } 12401 12402 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P, 12403 a size or position, has had all of its SAVE_EXPRs evaluated. 12404 We add any required statements to *STMT_P. */ 12405 12406 void 12407 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p) 12408 { 12409 tree expr = *expr_p; 12410 12411 /* We don't do anything if the value isn't there, is constant, or contains 12412 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already 12413 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier 12414 will want to replace it with a new variable, but that will cause problems 12415 if this type is from outside the function. It's OK to have that here. */ 12416 if (is_gimple_sizepos (expr)) 12417 return; 12418 12419 *expr_p = unshare_expr (expr); 12420 12421 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed 12422 if the def vanishes. */ 12423 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false); 12424 } 12425 12426 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node 12427 containing the sequence of corresponding GIMPLE statements. If DO_PARMS 12428 is true, also gimplify the parameters. 
*/ 12429 12430 gbind * 12431 gimplify_body (tree fndecl, bool do_parms) 12432 { 12433 location_t saved_location = input_location; 12434 gimple_seq parm_stmts, seq; 12435 gimple *outer_stmt; 12436 gbind *outer_bind; 12437 struct cgraph_node *cgn; 12438 12439 timevar_push (TV_TREE_GIMPLIFY); 12440 12441 init_tree_ssa (cfun); 12442 12443 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during 12444 gimplification. */ 12445 default_rtl_profile (); 12446 12447 gcc_assert (gimplify_ctxp == NULL); 12448 push_gimplify_context (true); 12449 12450 if (flag_openacc || flag_openmp) 12451 { 12452 gcc_assert (gimplify_omp_ctxp == NULL); 12453 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl))) 12454 gimplify_omp_ctxp = new_omp_context (ORT_TARGET); 12455 } 12456 12457 /* Unshare most shared trees in the body and in that of any nested functions. 12458 It would seem we don't have to do this for nested functions because 12459 they are supposed to be output and then the outer function gimplified 12460 first, but the g++ front end doesn't always do it that way. */ 12461 unshare_body (fndecl); 12462 unvisit_body (fndecl); 12463 12464 cgn = cgraph_node::get (fndecl); 12465 if (cgn && cgn->origin) 12466 nonlocal_vlas = new hash_set<tree>; 12467 12468 /* Make sure input_location isn't set to something weird. */ 12469 input_location = DECL_SOURCE_LOCATION (fndecl); 12470 12471 /* Resolve callee-copies. This has to be done before processing 12472 the body so that DECL_VALUE_EXPR gets processed correctly. */ 12473 parm_stmts = do_parms ? gimplify_parameters () : NULL; 12474 12475 /* Gimplify the function's body. */ 12476 seq = NULL; 12477 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq); 12478 outer_stmt = gimple_seq_first_stmt (seq); 12479 if (!outer_stmt) 12480 { 12481 outer_stmt = gimple_build_nop (); 12482 gimplify_seq_add_stmt (&seq, outer_stmt); 12483 } 12484 12485 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is 12486 not the case, wrap everything in a GIMPLE_BIND to make it so. */ 12487 if (gimple_code (outer_stmt) == GIMPLE_BIND 12488 && gimple_seq_first (seq) == gimple_seq_last (seq)) 12489 outer_bind = as_a <gbind *> (outer_stmt); 12490 else 12491 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL); 12492 12493 DECL_SAVED_TREE (fndecl) = NULL_TREE; 12494 12495 /* If we had callee-copies statements, insert them at the beginning 12496 of the function and clear DECL_VALUE_EXPR_P on the parameters. */ 12497 if (!gimple_seq_empty_p (parm_stmts)) 12498 { 12499 tree parm; 12500 12501 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind)); 12502 gimple_bind_set_body (outer_bind, parm_stmts); 12503 12504 for (parm = DECL_ARGUMENTS (current_function_decl); 12505 parm; parm = DECL_CHAIN (parm)) 12506 if (DECL_HAS_VALUE_EXPR_P (parm)) 12507 { 12508 DECL_HAS_VALUE_EXPR_P (parm) = 0; 12509 DECL_IGNORED_P (parm) = 0; 12510 } 12511 } 12512 12513 if (nonlocal_vlas) 12514 { 12515 if (nonlocal_vla_vars) 12516 { 12517 /* tree-nested.c may later on call declare_vars (..., true); 12518 which relies on BLOCK_VARS chain to be the tail of the 12519 gimple_bind_vars chain. Ensure we don't violate that 12520 assumption. 
*/ 12521 if (gimple_bind_block (outer_bind) 12522 == DECL_INITIAL (current_function_decl)) 12523 declare_vars (nonlocal_vla_vars, outer_bind, true); 12524 else 12525 BLOCK_VARS (DECL_INITIAL (current_function_decl)) 12526 = chainon (BLOCK_VARS (DECL_INITIAL (current_function_decl)), 12527 nonlocal_vla_vars); 12528 nonlocal_vla_vars = NULL_TREE; 12529 } 12530 delete nonlocal_vlas; 12531 nonlocal_vlas = NULL; 12532 } 12533 12534 if ((flag_openacc || flag_openmp || flag_openmp_simd) 12535 && gimplify_omp_ctxp) 12536 { 12537 delete_omp_context (gimplify_omp_ctxp); 12538 gimplify_omp_ctxp = NULL; 12539 } 12540 12541 pop_gimplify_context (outer_bind); 12542 gcc_assert (gimplify_ctxp == NULL); 12543 12544 if (flag_checking && !seen_error ()) 12545 verify_gimple_in_seq (gimple_bind_body (outer_bind)); 12546 12547 timevar_pop (TV_TREE_GIMPLIFY); 12548 input_location = saved_location; 12549 12550 return outer_bind; 12551 } 12552 12553 typedef char *char_p; /* For DEF_VEC_P. */ 12554 12555 /* Return whether we should exclude FNDECL from instrumentation. */ 12556 12557 static bool 12558 flag_instrument_functions_exclude_p (tree fndecl) 12559 { 12560 vec<char_p> *v; 12561 12562 v = (vec<char_p> *) flag_instrument_functions_exclude_functions; 12563 if (v && v->length () > 0) 12564 { 12565 const char *name; 12566 int i; 12567 char *s; 12568 12569 name = lang_hooks.decl_printable_name (fndecl, 0); 12570 FOR_EACH_VEC_ELT (*v, i, s) 12571 if (strstr (name, s) != NULL) 12572 return true; 12573 } 12574 12575 v = (vec<char_p> *) flag_instrument_functions_exclude_files; 12576 if (v && v->length () > 0) 12577 { 12578 const char *name; 12579 int i; 12580 char *s; 12581 12582 name = DECL_SOURCE_FILE (fndecl); 12583 FOR_EACH_VEC_ELT (*v, i, s) 12584 if (strstr (name, s) != NULL) 12585 return true; 12586 } 12587 12588 return false; 12589 } 12590 12591 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL 12592 node for the function we want to gimplify. 12593 12594 Return the sequence of GIMPLE statements corresponding to the body 12595 of FNDECL. */ 12596 12597 void 12598 gimplify_function_tree (tree fndecl) 12599 { 12600 tree parm, ret; 12601 gimple_seq seq; 12602 gbind *bind; 12603 12604 gcc_assert (!gimple_body (fndecl)); 12605 12606 if (DECL_STRUCT_FUNCTION (fndecl)) 12607 push_cfun (DECL_STRUCT_FUNCTION (fndecl)); 12608 else 12609 push_struct_function (fndecl); 12610 12611 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr 12612 if necessary. */ 12613 cfun->curr_properties |= PROP_gimple_lva; 12614 12615 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm)) 12616 { 12617 /* Preliminarily mark non-addressed complex variables as eligible 12618 for promotion to gimple registers. We'll transform their uses 12619 as we find them. 
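   A _Complex double parameter whose address is never taken, say, can
   then be kept in registers, with its real and imaginary parts
   rewritten into SSA names later on.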
*/ 12620 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE 12621 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE) 12622 && !TREE_THIS_VOLATILE (parm) 12623 && !needs_to_live_in_memory (parm)) 12624 DECL_GIMPLE_REG_P (parm) = 1; 12625 } 12626 12627 ret = DECL_RESULT (fndecl); 12628 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE 12629 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE) 12630 && !needs_to_live_in_memory (ret)) 12631 DECL_GIMPLE_REG_P (ret) = 1; 12632 12633 if (asan_sanitize_use_after_scope () && !asan_no_sanitize_address_p ()) 12634 asan_poisoned_variables = new hash_set<tree> (); 12635 bind = gimplify_body (fndecl, true); 12636 if (asan_poisoned_variables) 12637 { 12638 delete asan_poisoned_variables; 12639 asan_poisoned_variables = NULL; 12640 } 12641 12642 /* The tree body of the function is no longer needed, replace it 12643 with the new GIMPLE body. */ 12644 seq = NULL; 12645 gimple_seq_add_stmt (&seq, bind); 12646 gimple_set_body (fndecl, seq); 12647 12648 /* If we're instrumenting function entry/exit, then prepend the call to 12649 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to 12650 catch the exit hook. */ 12651 /* ??? Add some way to ignore exceptions for this TFE. */ 12652 if (flag_instrument_function_entry_exit 12653 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl) 12654 /* Do not instrument extern inline functions. */ 12655 && !(DECL_DECLARED_INLINE_P (fndecl) 12656 && DECL_EXTERNAL (fndecl) 12657 && DECL_DISREGARD_INLINE_LIMITS (fndecl)) 12658 && !flag_instrument_functions_exclude_p (fndecl)) 12659 { 12660 tree x; 12661 gbind *new_bind; 12662 gimple *tf; 12663 gimple_seq cleanup = NULL, body = NULL; 12664 tree tmp_var; 12665 gcall *call; 12666 12667 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS); 12668 call = gimple_build_call (x, 1, integer_zero_node); 12669 tmp_var = create_tmp_var (ptr_type_node, "return_addr"); 12670 gimple_call_set_lhs (call, tmp_var); 12671 gimplify_seq_add_stmt (&cleanup, call); 12672 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT); 12673 call = gimple_build_call (x, 2, 12674 build_fold_addr_expr (current_function_decl), 12675 tmp_var); 12676 gimplify_seq_add_stmt (&cleanup, call); 12677 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY); 12678 12679 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS); 12680 call = gimple_build_call (x, 1, integer_zero_node); 12681 tmp_var = create_tmp_var (ptr_type_node, "return_addr"); 12682 gimple_call_set_lhs (call, tmp_var); 12683 gimplify_seq_add_stmt (&body, call); 12684 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER); 12685 call = gimple_build_call (x, 2, 12686 build_fold_addr_expr (current_function_decl), 12687 tmp_var); 12688 gimplify_seq_add_stmt (&body, call); 12689 gimplify_seq_add_stmt (&body, tf); 12690 new_bind = gimple_build_bind (NULL, body, NULL); 12691 12692 /* Replace the current function body with the body 12693 wrapped in the try/finally TF. */ 12694 seq = NULL; 12695 gimple_seq_add_stmt (&seq, new_bind); 12696 gimple_set_body (fndecl, seq); 12697 bind = new_bind; 12698 } 12699 12700 if ((flag_sanitize & SANITIZE_THREAD) != 0 12701 && !lookup_attribute ("no_sanitize_thread", DECL_ATTRIBUTES (fndecl))) 12702 { 12703 gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0); 12704 gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY); 12705 gbind *new_bind = gimple_build_bind (NULL, tf, NULL); 12706 /* Replace the current function body with the body 12707 wrapped in the try/finally TF. 
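   The resulting shape is roughly

     try
       {
         <original body>
       }
     finally
       {
         IFN_TSAN_FUNC_EXIT ();
       }

   mirroring the __cyg_profile_func_enter/exit wrapping done above for
   -finstrument-functions.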
/* Return a dummy expression of type TYPE in order to keep going after an
   error.  */

static tree
dummy_object (tree type)
{
  tree t = build_int_cst (build_pointer_type (type), 0);
  return build2 (MEM_REF, type, t, t);
}

/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
		      gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t, tag, aptag;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);
  if (have_va_type == NULL_TREE
      && POINTER_TYPE_P (TREE_TYPE (valist)))
    /* Handle 'Case 1: Not an array type' from c-common.c/build_va_arg.  */
    have_va_type
      = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
  gcc_assert (have_va_type != NULL_TREE);

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type)) != type)
    {
      static bool gave_help;
      bool warned;
      /* Use the expansion point to handle cases such as passing bool (defined
	 in a system header) through `...'.  */
      source_location xloc
	= expansion_point_location_if_in_system_header (loc);

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      warned = warning_at (xloc, 0,
			   "%qT is promoted to %qT when passed through %<...%>",
			   type, promoted_type);
      if (!gave_help && warned)
	{
	  gave_help = true;
	  inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      if (warned)
	inform (xloc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
	 expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
			       builtin_decl_implicit (BUILT_IN_TRAP), 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }

  tag = build_int_cst (build_pointer_type (type), 0);
  aptag = build_int_cst (TREE_TYPE (valist), 0);

  *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
					  valist, tag, aptag);

  /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
     needs to be expanded.  */
  cfun->curr_properties &= ~PROP_gimple_lva;

  return GS_OK;
}
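/* Illustrative sketch (not part of GCC): the promotion check above fires
   for user code like the following, because variadic arguments undergo
   the default argument promotions at the call site -- a short is passed
   as int, so va_arg must name int, not short.  The function name is
   hypothetical.  */

#include <stdarg.h>

static int
first_vararg (int count, ...)
{
  va_list ap;
  int v;

  va_start (ap, count);
  /* Wrong: draws the "'short int' is promoted to 'int' when passed
     through '...'" warning above, and the gimplifier plants a trap in
     place of the access because the behavior is undefined.  */
  /* v = va_arg (ap, short); */
  /* Right: read the promoted type instead.  */
  v = va_arg (ap, int);
  va_end (ap);
  return v;
}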
/* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.

   DST/SRC are the destination and source respectively.  You can pass
   ungimplified trees in DST or SRC, in which case they will be
   converted to a gimple operand if necessary.

   This function returns the newly created GIMPLE_ASSIGN tuple.  */

gimple *
gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
{
  tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
  gimplify_and_add (t, seq_p);
  ggc_free (t);
  return gimple_seq_last_stmt (*seq_p);
}

inline hashval_t
gimplify_hasher::hash (const elt_t *p)
{
  tree t = p->val;
  return iterative_hash_expr (t, 0);
}

inline bool
gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
{
  tree t1 = p1->val;
  tree t2 = p2->val;
  enum tree_code code = TREE_CODE (t1);

  if (TREE_CODE (t2) != code
      || TREE_TYPE (t1) != TREE_TYPE (t2))
    return false;

  if (!operand_equal_p (t1, t2, 0))
    return false;

  /* Only allow them to compare equal if they also hash equal; otherwise
     results are nondeterministic, and we fail bootstrap comparison.  */
  gcc_checking_assert (hash (p1) == hash (p2));

  return true;
}
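/* Illustrative sketch (not part of GCC): typical use of gimplify_assign
   from a pass that wants to emit "lhs = a + b" into a statement
   sequence.  The helper and its parameter names are hypothetical.  The
   RHS built here is an ungimplified tree; per the comment on
   gimplify_assign above, it is reduced to a valid GIMPLE operand as
   needed and the resulting GIMPLE_ASSIGN is appended to *SEQ.  */

static gimple *
emit_sum_assignment (tree lhs, tree a, tree b, gimple_seq *seq)
{
  /* Build the ungimplified RHS; gimplify_assign gimplifies it for us
     and returns the assignment statement it appended.  */
  tree sum = build2 (PLUS_EXPR, TREE_TYPE (lhs), a, b);
  return gimplify_assign (lhs, sum, seq);
}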