/* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
   tree representation into the GIMPLE form.
   Copyright (C) 2002-2017 Free Software Foundation, Inc.
   Major work done by Sebastian Pop <s.pop@laposte.net>,
   Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "gimple-predict.h"
#include "tree-pass.h"		/* FIXME: only for PROP_gimple_any */
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "calls.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-cfg.h"
#include "tree-ssa.h"
#include "omp-general.h"
#include "omp-low.h"
#include "gimple-low.h"
#include "cilk.h"
#include "gomp-constants.h"
#include "tree-dump.h"
#include "gimple-walk.h"
#include "langhooks-def.h"	/* FIXME: for lhd_set_decl_assembler_name */
#include "builtins.h"
#include "asan.h"
#include "dbgcnt.h"

/* Hash set of poisoned variables in a bind expr.  */
static hash_set<tree> *asan_poisoned_variables = NULL;

enum gimplify_omp_var_data
{
  GOVD_SEEN = 1,
  GOVD_EXPLICIT = 2,
  GOVD_SHARED = 4,
  GOVD_PRIVATE = 8,
  GOVD_FIRSTPRIVATE = 16,
  GOVD_LASTPRIVATE = 32,
  GOVD_REDUCTION = 64,
  GOVD_LOCAL = 128,
  GOVD_MAP = 256,
  GOVD_DEBUG_PRIVATE = 512,
  GOVD_PRIVATE_OUTER_REF = 1024,
  GOVD_LINEAR = 2048,
  GOVD_ALIGNED = 4096,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 8192,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 16384,

  GOVD_MAP_0LEN_ARRAY = 32768,

  /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping.  */
  GOVD_MAP_ALWAYS_TO = 65536,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 131072,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 262144,

  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};

enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_SIMD = 0x01,

  ORT_PARALLEL = 0x02,
  ORT_COMBINED_PARALLEL = 0x03,

  ORT_TASK = 0x04,
  ORT_UNTIED_TASK = 0x05,

  ORT_TEAMS = 0x08,
  ORT_COMBINED_TEAMS = 0x09,

  /* Data region.  */
  ORT_TARGET_DATA = 0x10,

  /* Data region with offloading.  */
  ORT_TARGET = 0x20,
  ORT_COMBINED_TARGET = 0x21,

  /* OpenACC variants.  */
  ORT_ACC = 0x40,  /* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,  /* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,  /* Parallel construct.  */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 0x80,  /* Kernels construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 0x80,  /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x100
};

/* Gimplify hashtable helper.  */

struct gimplify_hasher : free_ptr_hash <elt_t>
{
  static inline hashval_t hash (const elt_t *);
  static inline bool equal (const elt_t *, const elt_t *);
};

struct gimplify_ctx
{
  struct gimplify_ctx *prev_context;

  vec<gbind *> bind_expr_stack;
  tree temps;
  gimple_seq conditional_cleanups;
  tree exit_label;
  tree return_temp;

  vec<tree> case_labels;
  hash_set<tree> *live_switch_vars;
  /* The formal temporary table.  Should this be persistent?  */
  hash_table<gimplify_hasher> *temp_htab;

  int conditions;
  unsigned into_ssa : 1;
  unsigned allow_rhs_cond_expr : 1;
  unsigned in_cleanup_point_expr : 1;
  unsigned keep_stack : 1;
  unsigned save_stack : 1;
  unsigned in_switch_expr : 1;
};

struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;
  splay_tree variables;
  hash_set<tree> *privatized_types;
  /* Iteration variables in an OMP_FOR.  */
  vec<tree> loop_iter_var;
  location_t location;
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
  bool combined_loop;
  bool distribute;
  bool target_map_scalars_firstprivate;
  bool target_map_pointers_as_0len_arrays;
  bool target_firstprivatize_array_bases;
  bool add_safelen1;
};

static struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;

/* Forward declaration.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
static hash_map<tree, tree> *oacc_declare_returns;
static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
					   bool (*) (tree), fallback_t, bool);

/* Shorter alias name for the above function for use in gimplify.c
   only.  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}

/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  This function is
   similar to gimple_seq_add_seq, but does not scan the operands.
   During gimplification, we need to manipulate statement sequences
   before the def/use vectors have been constructed.  */

static void
gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  si = gsi_last (*dst_p);
  gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
}


/* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
   and popping gimplify contexts.  */

static struct gimplify_ctx *ctx_pool = NULL;

/* Return a gimplify context struct from the pool.  */

static inline struct gimplify_ctx *
ctx_alloc (void)
{
  struct gimplify_ctx * c = ctx_pool;

  if (c)
    ctx_pool = c->prev_context;
  else
    c = XNEW (struct gimplify_ctx);

  memset (c, '\0', sizeof (*c));
  return c;
}

/* Put gimplify context C back into the pool.  */

static inline void
ctx_free (struct gimplify_ctx *c)
{
  c->prev_context = ctx_pool;
  ctx_pool = c;
}

/* Free allocated ctx stack memory.  */

void
free_gimplify_stack (void)
{
  struct gimplify_ctx *c;

  while ((c = ctx_pool))
    {
      ctx_pool = c->prev_context;
      free (c);
    }
}

/* Set up a context for the gimplifier.  */

void
push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
{
  struct gimplify_ctx *c = ctx_alloc ();

  c->prev_context = gimplify_ctxp;
  gimplify_ctxp = c;
  gimplify_ctxp->into_ssa = in_ssa;
  gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
}

/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple *body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  gcc_assert (c
	      && (!c->bind_expr_stack.exists ()
		  || c->bind_expr_stack.is_empty ()));
  c->bind_expr_stack.release ();
  gimplify_ctxp = c->prev_context;

  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  delete c->temp_htab;
  c->temp_htab = NULL;
  ctx_free (c);
}
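
/* Usage sketch (added commentary, not from the original file): callers
   bracket a gimplification session with the two functions above, e.g.

     push_gimplify_context (false, false);
     gimple_seq seq = NULL;
     gimplify_and_add (some_generic_stmt, &seq);
     pop_gimplify_context (outermost_bind);   // or NULL to use local_decls

   where OUTERMOST_BIND is the GIMPLE_BIND that should receive the
   temporaries created in between; the variable names here are
   hypothetical.  */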

/* Push a GIMPLE_BIND tuple onto the stack of bindings.  */

static void
gimple_push_bind_expr (gbind *bind_stmt)
{
  gimplify_ctxp->bind_expr_stack.reserve (8);
  gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
}

/* Pop the first element off the stack of bindings.  */

static void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}

/* Return the first element of the stack of bindings.  */

gbind *
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}

/* Return the stack of bindings created during gimplification.  */

vec<gbind *>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}

/* Return true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}

/* Note that we've entered a COND_EXPR.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}

/* Note that we've left a COND_EXPR.  If we're back at unconditional scope
   now, add any conditional cleanups we've seen to the prequeue.  */

static void
gimple_pop_condition (gimple_seq *pre_p)
{
  int conds = --(gimplify_ctxp->conditions);

  gcc_assert (conds >= 0);
  if (conds == 0)
    {
      gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
      gimplify_ctxp->conditional_cleanups = NULL;
    }
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* Create a new omp construct that deals with variable remapping.  */

static struct gimplify_omp_ctx *
new_omp_context (enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *c;

  c = XCNEW (struct gimplify_omp_ctx);
  c->outer_context = gimplify_omp_ctxp;
  c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
  c->privatized_types = new hash_set<tree>;
  c->location = input_location;
  c->region_type = region_type;
  if ((region_type & ORT_TASK) == 0)
    c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  else
    c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;

  return c;
}

/* Destroy an omp construct that deals with variable remapping.  */

static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  delete c->privatized_types;
  c->loop_iter_var.release ();
  XDELETE (c);
}

static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);

/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}

/* Gimplify statement T into sequence *SEQ_P, and return the first
   tuple in the sequence of generated tuples for this statement.
   Return NULL if gimplifying T produced no tuples.  */

static gimple *
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  gimple_stmt_iterator last = gsi_last (*seq_p);

  gimplify_and_add (t, seq_p);

  if (!gsi_end_p (last))
    {
      gsi_next (&last);
      return gsi_stmt (last);
    }
  else
    return gimple_seq_first_stmt (*seq_p);
}

/* Returns true iff T is a valid RHS for an assignment to an un-renamed
   LHS, or for a call argument.  */

static bool
is_gimple_mem_rhs (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return is_gimple_val (t) || is_gimple_lvalue (t);
}

/* Return true if T is a CALL_EXPR or an expression that can be
   assigned to a temporary.  Note that this predicate should only be
   used during gimplification.  See the rationale for this in
   gimplify_modify_expr.  */

static bool
is_gimple_reg_rhs_or_call (tree t)
{
  return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
	  || TREE_CODE (t) == CALL_EXPR);
}

/* Return true if T is a valid memory RHS or a CALL_EXPR.  Note that
   this predicate should only be used during gimplification.  See the
   rationale for this in gimplify_modify_expr.  */

static bool
is_gimple_mem_rhs_or_call (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return (is_gimple_val (t)
	    || is_gimple_lvalue (t)
	    || TREE_CLOBBER_P (t)
	    || TREE_CODE (t) == CALL_EXPR);
}

/* Create a temporary with a name derived from VAL.  Subroutine of
   lookup_tmp_var; nobody else should call this function.  */

static inline tree
create_tmp_from_val (tree val)
{
  /* Drop all qualifiers and address-space information from the value type.  */
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
  tree var = create_tmp_var (type, get_name (val));
  if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
      || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (var) = 1;
  return var;
}

/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary.  */

static tree
lookup_tmp_var (tree val, bool is_formal)
{
  tree ret;

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    ret = create_tmp_from_val (val);
  else
    {
      elt_t elt, *elt_p;
      elt_t **slot;

      elt.val = val;
      if (!gimplify_ctxp->temp_htab)
	gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
      slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
      if (*slot == NULL)
	{
	  elt_p = XNEW (elt_t);
	  elt_p->val = val;
	  elt_p->temp = ret = create_tmp_from_val (val);
	  *slot = elt_p;
	}
      else
	{
	  elt_p = *slot;
	  ret = elt_p->temp;
	}
    }

  return ret;
}

/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
		      bool is_formal, bool allow_ssa)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  if (allow_ssa
      && gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    {
      t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
      if (! gimple_in_ssa_p (cfun))
	{
	  const char *name = get_name (val);
	  if (name)
	    SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
	}
    }
  else
    t = lookup_tmp_var (val, is_formal);

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  ggc_free (mod);

  return t;
}

/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
      change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  return internal_get_tmp_var (val, pre_p, NULL, true, true);
}

/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
			 bool allow_ssa)
{
  return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa);
}
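
/* Illustration (added commentary, not from the original file): a formal
   temporary lets repeated evaluations of the same known-stable expression
   share one slot.  Conceptually, reducing

     x = (a + b) * (a + b);

   with optimization on can produce

     D.1 = a + b;
     x = D.1 * D.1;

   while !optimize, or a VAL with side effects, forces distinct temporaries:

     D.1 = a + b;
     D.2 = a + b;
     x = D.1 * D.2;

   The D.n names are the usual dump notation for artificial temporaries;
   the exact numbering is schematic.  */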

/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  */

void
declare_vars (tree vars, gimple *gs, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gbind *scope = as_a <gbind *> (gs);

      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
				    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}

/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
   for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
   no such upper bound can be obtained.  */

static void
force_constant_size (tree var)
{
  /* The only attempt we make is by querying the maximum size of objects
     of the variable's type.  */

  HOST_WIDE_INT max_size;

  gcc_assert (VAR_P (var));

  max_size = max_int_size_in_bytes (TREE_TYPE (var));

  gcc_assert (max_size >= 0);

  DECL_SIZE_UNIT (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
  DECL_SIZE (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var_fn (struct function *fn, tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = fn->decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  record_vars_into (tmp, fn->decl);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    ctx = ctx->outer_context;
	  if (ctx)
	    omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}



/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
   nodes that are referenced more than once in GENERIC functions.  This is
   necessary because gimplification (translation into GIMPLE) is performed
   by modifying tree nodes in-place, so gimplification of a shared node in a
   first context could generate an invalid GIMPLE form in a second context.

   This is achieved with a simple mark/copy/unmark algorithm that walks the
   GENERIC representation top-down, marks nodes with TREE_VISITED the first
   time it encounters them, duplicates them if they already have TREE_VISITED
   set, and finally removes the TREE_VISITED marks it has set.

   The algorithm works only at the function level, i.e. it generates a GENERIC
   representation of a function with no nodes shared within the function when
   passed a GENERIC function (except for nodes that are allowed to be shared).

   At the global level, it is also necessary to unshare tree nodes that are
   referenced in more than one function, for the same aforementioned reason.
   This requires some cooperation from the front-end.  There are 2 strategies:

   1. Manual unsharing.  The front-end needs to call unshare_expr on every
      expression that might end up being shared across functions.

   2. Deep unsharing.  This is an extension of regular unsharing.  Instead
      of calling unshare_expr on expressions that might be shared across
      functions, the front-end pre-marks them with TREE_VISITED.  This will
      ensure that they are unshared on the first reference within functions
      when the regular unsharing algorithm runs.  The counterpart is that
      this algorithm must look deeper than for manual unsharing, which is
      specified by LANG_HOOKS_DEEP_UNSHARING.

   If there are only few specific cases of node sharing across functions, it is
   probably easier for a front-end to unshare the expressions manually.  On the
   contrary, if the expressions generated at the global level are as widespread
   as expressions generated within functions, deep unsharing is very likely the
   way to go.  */
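
/* Worked illustration (added commentary, not from the original file):
   suppose a front-end built "(a + b) * (a + b)" reusing one PLUS_EXPR
   node N for both operands of the MULT_EXPR.  The top-down walk sets
   TREE_VISITED on N when it reaches the first operand; at the second
   operand it finds the mark already set and substitutes a fresh copy
   of N.  A final pass clears the marks, leaving two independent nodes
   that gimplification can safely rewrite in place.  */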

/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      if (data && !((hash_set<tree> *)data)->add (t))
	;
      else
	*walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
	   || TREE_CODE_CLASS (code) == tcc_declaration
	   || TREE_CODE_CLASS (code) == tcc_constant
	   /* We can't do anything sensible with a BLOCK used as an
	      expression, but we also can't just die when we see it
	      because of non-expression uses.  So we avert our eyes
	      and cross our fingers.  Silly Java.  */
	   || code == BLOCK)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}

/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}

/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
   copy_if_shared_r callback unmodified.  */

static inline void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}

/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
   any nested functions.  */

static void
unshare_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  /* If the language requires deep unsharing, we need a pointer set to make
     sure we don't repeatedly unshare subtrees of unshareable nodes.  */
  hash_set<tree> *visited
    = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;

  copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
  copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
  copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);

  delete visited;

  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unshare_body (cgn->decl);
}

/* Callback for walk_tree to unmark the visited trees rooted at *TP.
   Subtrees are walked until the first unvisited node is encountered.  */

static tree
unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  /* If this node has been visited, unmark it and keep looking.  */
  if (TREE_VISITED (t))
    TREE_VISITED (t) = 0;

  /* Otherwise, don't look any deeper.  */
  else
    *walk_subtrees = 0;

  return NULL_TREE;
}

/* Unmark the visited trees rooted at *TP.  */

static inline void
unmark_visited (tree *tp)
{
  walk_tree (tp, unmark_visited_r, NULL, NULL);
}

/* Likewise, but mark all trees as not visited.  */

static void
unvisit_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);

  unmark_visited (&DECL_SAVED_TREE (fndecl));
  unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
  unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));

  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unvisit_body (cgn->decl);
}

/* Unconditionally make an unshared copy of EXPR.  This is used when using
   stored expressions which span multiple functions, such as BINFO_VTABLE,
   as the normal unsharing process can't tell that they're shared.  */

tree
unshare_expr (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}

/* Worker for unshare_expr_without_location.  */

static tree
prune_expr_location (tree *tp, int *walk_subtrees, void *)
{
  if (EXPR_P (*tp))
    SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
  else
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Similar to unshare_expr but also prune all expression locations
   from EXPR.  */

tree
unshare_expr_without_location (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  if (EXPR_P (expr))
    walk_tree (&expr, prune_expr_location, NULL, NULL);
  return expr;
}

/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    case TRANSACTION_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TRANSACTION_EXPR_BODY (*p);
	      break;

	    default:
	      /* Assume that any tree upon which voidify_wrapper_expr is
		 directly called is a wrapper, and that its body is op0.  */
	      if (p == &wrapper)
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		  p = &TREE_OPERAND (*p, 0);
		  break;
		}
	      goto out;
	    }
	}

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}
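
/* Illustration (added commentary, not from the original file): for a GNU
   statement-expression used as an initializer, e.g.

     int x = ({ int i = f (); i + 1; });

   the BIND_EXPR wrapping the braces has type int.  voidify_wrapper_expr
   drills down to the final value "i + 1", rewrites it as an assignment to
   a fresh "retval" temporary (or to the caller-supplied TEMP), and gives
   every enclosing wrapper void type, so the construct can then be
   gimplified as a plain statement sequence followed by a use of the
   temporary.  */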

/* Prepare calls to builtins to SAVE and RESTORE the stack as well as
   a temporary through which they communicate.  */

static void
build_stack_save_restore (gcall **save, gcall **restore)
{
  tree tmp_var;

  *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
  tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
  gimple_call_set_lhs (*save, tmp_var);

  *restore
    = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
			 1, tmp_var);
}
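
/* Illustration (added commentary, not from the original file): in GIMPLE
   dumps the pair built here shows up roughly as

     saved_stack.1 = __builtin_stack_save ();
     ...                                        <- body using VLAs/alloca
     __builtin_stack_restore (saved_stack.1);

   gimplify_bind_expr below wraps the body in a GIMPLE_TRY_FINALLY so the
   restore runs on every exit from the scope.  The ".1" suffix is just the
   usual dump numbering for artificial temporaries.  */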

/* Generate IFN_ASAN_MARK call that poisons the shadow memory of a DECL
   variable.  */

static tree
build_asan_poison_call_expr (tree decl)
{
  /* Do not poison variables that have size equal to zero.  */
  tree unit_size = DECL_SIZE_UNIT (decl);
  if (zerop (unit_size))
    return NULL_TREE;

  tree base = build_fold_addr_expr (decl);

  return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
				       void_type_node, 3,
				       build_int_cst (integer_type_node,
						      ASAN_MARK_POISON),
				       base, unit_size);
}

/* Generate IFN_ASAN_MARK call that would poison or unpoison, depending
   on POISON flag, shadow memory of a DECL variable.  The call will be
   put on location identified by IT iterator, where BEFORE flag drives
   position where the stmt will be put.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
		      bool before)
{
  tree unit_size = DECL_SIZE_UNIT (decl);
  tree base = build_fold_addr_expr (decl);

  /* Do not poison variables that have size equal to zero.  */
  if (zerop (unit_size))
    return;

  /* It's necessary to have all stack variables aligned to ASAN granularity
     bytes.  */
  if (DECL_ALIGN_UNIT (decl) <= ASAN_SHADOW_GRANULARITY)
    SET_DECL_ALIGN (decl, BITS_PER_UNIT * ASAN_SHADOW_GRANULARITY);

  HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;

  gimple *g
    = gimple_build_call_internal (IFN_ASAN_MARK, 3,
				  build_int_cst (integer_type_node, flags),
				  base, unit_size);

  if (before)
    gsi_insert_before (it, g, GSI_NEW_STMT);
  else
    gsi_insert_after (it, g, GSI_NEW_STMT);
}

/* Generate IFN_ASAN_MARK internal call that depending on POISON flag
   either poisons or unpoisons a DECL.  Created statement is appended
   to SEQ_P gimple sequence.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
{
  gimple_stmt_iterator it = gsi_last (*seq_p);
  bool before = false;

  if (gsi_end_p (it))
    before = true;

  asan_poison_variable (decl, poison, &it, before);
}

/* Sort pair of VAR_DECLs A and B by DECL_UID.  */

static int
sort_by_decl_uid (const void *a, const void *b)
{
  const tree *t1 = (const tree *)a;
  const tree *t2 = (const tree *)b;

  int uid1 = DECL_UID (*t1);
  int uid2 = DECL_UID (*t2);

  if (uid1 < uid2)
    return -1;
  else if (uid1 > uid2)
    return 1;
  else
    return 0;
}

/* Generate IFN_ASAN_MARK internal call for all VARIABLES
   depending on POISON flag.  Created statement is appended
   to SEQ_P gimple sequence.  */

static void
asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
{
  unsigned c = variables->elements ();
  if (c == 0)
    return;

  auto_vec<tree> sorted_variables (c);

  for (hash_set<tree>::iterator it = variables->begin ();
       it != variables->end (); ++it)
    sorted_variables.safe_push (*it);

  sorted_variables.qsort (sort_by_decl_uid);

  unsigned i;
  tree var;
  FOR_EACH_VEC_ELT (sorted_variables, i, var)
    {
      asan_poison_variable (var, poison, seq_p);

      /* Add use_after_scope_memory attribute for the variable in order
	 to prevent it from being rewritten into SSA form.  */
      if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
			     DECL_ATTRIBUTES (var)))
	DECL_ATTRIBUTES (var)
	  = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
		       integer_one_node,
		       DECL_ATTRIBUTES (var));
    }
}
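
/* Illustration (added commentary, not from the original file): with
   -fsanitize=address and use-after-scope checking, a block-local variable
   is bracketed by the internal calls built above.  In GIMPLE dumps this
   reads approximately

     .ASAN_MARK (UNPOISON, &x, 4);   <- on entry to x's scope
     ...                             <- uses of x
     .ASAN_MARK (POISON, &x, 4);     <- on exit from x's scope

   so the runtime can flag references made through a dangling pointer
   after the scope ends.  The size operand is DECL_SIZE_UNIT, 4 here for
   a hypothetical 'int x'.  */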

/* Gimplify a BIND_EXPR.  Just voidify and recurse.  */

static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_keep_stack = gimplify_ctxp->keep_stack;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gbind *bind_stmt;
  gimple_seq body, cleanup;
  gcall *stack_save;
  location_t start_locus = 0, end_locus = 0;
  tree ret_clauses = NULL;

  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t))
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;

	  /* Mark variable as local.  */
	  if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t)
	      && (!DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL))
	    {
	      int flag = GOVD_LOCAL;
	      if (ctx->region_type == ORT_SIMD
		  && TREE_ADDRESSABLE (t)
		  && !TREE_STATIC (t))
		{
		  if (TREE_CODE (DECL_SIZE_UNIT (t)) != INTEGER_CST)
		    ctx->add_safelen1 = true;
		  else
		    flag = GOVD_PRIVATE;
		}
	      omp_add_variable (ctx, t, flag | GOVD_SEEN);
	    }

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}

      /* Preliminarily mark non-addressed complex variables as eligible
	 for promotion to gimple registers.  We'll transform their uses
	 as we find them.  */
      if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	  && !TREE_THIS_VOLATILE (t)
	  && (VAR_P (t) && !DECL_HARD_REGISTER (t))
	  && !needs_to_live_in_memory (t))
	DECL_GIMPLE_REG_P (t) = 1;
    }

  bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
				 BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (bind_stmt);

  gimplify_ctxp->keep_stack = false;
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (bind_stmt, body);

  /* Source location wise, the cleanup code (stack_restore and clobbers)
     belongs to the end of the block, so propagate what we have.  The
     stack_save operation belongs to the beginning of block, which we can
     infer from the bind_expr directly if the block has no explicit
     assignment.  */
  if (BIND_EXPR_BLOCK (bind_expr))
    {
      end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
      start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
    }
  if (start_locus == 0)
    start_locus = EXPR_LOCATION (bind_expr);

  cleanup = NULL;
  stack_save = NULL;

  /* If the code both contains VLAs and calls alloca, then we cannot reclaim
     the stack space allocated to the VLAs.  */
  if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
    {
      gcall *stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimple_set_location (stack_save, start_locus);
      gimple_set_location (stack_restore, end_locus);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t)
	  && !is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl)
	{
	  if (!DECL_HARD_REGISTER (t)
	      && !TREE_THIS_VOLATILE (t)
	      && !DECL_HAS_VALUE_EXPR_P (t)
	      /* Only care for variables that have to be in memory.  Others
		 will be rewritten into SSA names, hence moved to the
		 top-level.  */
	      && !is_gimple_reg (t)
	      && flag_stack_reuse != SR_NONE)
	    {
	      tree clobber = build_constructor (TREE_TYPE (t), NULL);
	      gimple *clobber_stmt;
	      TREE_THIS_VOLATILE (clobber) = 1;
	      clobber_stmt = gimple_build_assign (t, clobber);
	      gimple_set_location (clobber_stmt, end_locus);
	      gimplify_seq_add_stmt (&cleanup, clobber_stmt);
	    }

	  if (flag_openacc && oacc_declare_returns != NULL)
	    {
	      tree *c = oacc_declare_returns->get (t);
	      if (c != NULL)
		{
		  if (ret_clauses)
		    OMP_CLAUSE_CHAIN (*c) = ret_clauses;

		  ret_clauses = *c;

		  oacc_declare_returns->remove (t);

		  if (oacc_declare_returns->elements () == 0)
		    {
		      delete oacc_declare_returns;
		      oacc_declare_returns = NULL;
		    }
		}
	    }
	}

      if (asan_poisoned_variables != NULL
	  && asan_poisoned_variables->contains (t))
	{
	  asan_poisoned_variables->remove (t);
	  asan_poison_variable (t, true, &cleanup);
	}

      if (gimplify_ctxp->live_switch_vars != NULL
	  && gimplify_ctxp->live_switch_vars->contains (t))
	gimplify_ctxp->live_switch_vars->remove (t);
    }

  if (ret_clauses)
    {
      gomp_target *stmt;
      gimple_stmt_iterator si = gsi_start (cleanup);

      stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
				      ret_clauses);
      gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
    }

  if (cleanup)
    {
      gtry *gs;
      gimple_seq new_body;

      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
			     GIMPLE_TRY_FINALLY);

      if (stack_save)
	gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (bind_stmt, new_body);
    }

  /* keep_stack propagates all the way up to the outermost BIND_EXPR.  */
  if (!gimplify_ctxp->keep_stack)
    gimplify_ctxp->keep_stack = old_keep_stack;
  gimplify_ctxp->save_stack = old_save_stack;

  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, bind_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
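
/* Illustration (added commentary, not from the original file): for a block
   declaring a VLA, e.g.

     { int a[n]; use (a); }

   the GIMPLE_BIND built above ends up shaped like

     gimple_bind {
       saved_stack.1 = __builtin_stack_save ();
       try
	 {
	   ... allocate and use a ...
	 }
       finally
	 {
	   __builtin_stack_restore (saved_stack.1);
	 }
     }

   with clobber assignments such as "a = {CLOBBER};" also placed in the
   cleanup so later passes know the variable's stack slot is dead.  */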

/* Gimplify a RETURN_EXPR.  If the expression to be returned is not a
   GIMPLE value, it is assigned to a new temporary and the statement is
   re-written to return the temporary.

   PRE_P points to the sequence where side effects that must happen before
   STMT should be stored.  */

static enum gimplify_status
gimplify_return_expr (tree stmt, gimple_seq *pre_p)
{
  greturn *ret;
  tree ret_expr = TREE_OPERAND (stmt, 0);
  tree result_decl, result;

  if (ret_expr == error_mark_node)
    return GS_ERROR;

  /* Implicit _Cilk_sync must be inserted right before any return statement
     if there is a _Cilk_spawn in the function.  If the user has provided a
     _Cilk_sync, the optimizer should remove this duplicate one.  */
  if (fn_contains_cilk_spawn_p (cfun))
    {
      tree impl_sync = build0 (CILK_SYNC_STMT, void_type_node);
      gimplify_and_add (impl_sync, pre_p);
    }

  if (!ret_expr
      || TREE_CODE (ret_expr) == RESULT_DECL
      || ret_expr == error_mark_node)
    {
      greturn *ret = gimple_build_return (ret_expr);
      gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
      gimplify_seq_add_stmt (pre_p, ret);
      return GS_ALL_DONE;
    }

  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
    result_decl = NULL_TREE;
  else
    {
      result_decl = TREE_OPERAND (ret_expr, 0);

      /* See through a return by reference.  */
      if (TREE_CODE (result_decl) == INDIRECT_REF)
	result_decl = TREE_OPERAND (result_decl, 0);

      gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
		   || TREE_CODE (ret_expr) == INIT_EXPR)
		  && TREE_CODE (result_decl) == RESULT_DECL);
    }

  /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
     Recall that aggregate_value_p is FALSE for any aggregate type that is
     returned in registers.  If we're returning values in registers, then
     we don't want to extend the lifetime of the RESULT_DECL, particularly
     across another call.  In addition, for those aggregates for which
     hard_function_value generates a PARALLEL, we'll die during normal
     expansion of structure assignments; there's special code in expand_return
     to handle this case that does not exist in expand_expr.  */
  if (!result_decl)
    result = NULL_TREE;
  else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
    {
      if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
	    gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
	  /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
	     should be effectively allocated by the caller, i.e. all calls to
	     this function must be subject to the Return Slot Optimization.  */
	  gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
	  gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
	}
      result = result_decl;
    }
  else if (gimplify_ctxp->return_temp)
    result = gimplify_ctxp->return_temp;
  else
    {
      result = create_tmp_reg (TREE_TYPE (result_decl));

      /* ??? With complex control flow (usually involving abnormal edges),
	 we can wind up warning about an uninitialized value for this.  Due
	 to how this variable is constructed and initialized, this is never
	 true.  Give up and never warn.  */
      TREE_NO_WARNING (result) = 1;

      gimplify_ctxp->return_temp = result;
    }

  /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
     Then gimplify the whole thing.  */
  if (result != result_decl)
    TREE_OPERAND (ret_expr, 0) = result;

  gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);

  ret = gimple_build_return (result);
  gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
  gimplify_seq_add_stmt (pre_p, ret);

  return GS_ALL_DONE;
}
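
/* Illustration (added commentary, not from the original file): a non-void
   return such as

     return a + b;

   arrives here as RETURN_EXPR <MODIFY_EXPR <RESULT_DECL, a + b>> and, for
   a value returned in registers, gimplifies to something like

     retval.0 = a + b;
     return retval.0;

   Every return statement in the function reuses the same temporary via
   gimplify_ctxp->return_temp; the "retval.0" name is dump notation.  */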

/* Gimplify a variable-length array DECL.  */

static void
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
{
  /* This is a variable-sized decl.  Simplify its size and mark it
     for deferred expansion.  */
  tree t, addr, ptr_type;

  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);

  /* Don't mess with a DECL_VALUE_EXPR set by the front-end.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    return;

  /* All occurrences of this decl in final gimplified code will be
     replaced by indirection.  Setting DECL_VALUE_EXPR does two
     things: First, it lets the rest of the gimplifier know what
     replacement to use.  Second, it lets the debug info know
     where to find the value.  */
  ptr_type = build_pointer_type (TREE_TYPE (decl));
  addr = create_tmp_var (ptr_type, get_name (decl));
  DECL_IGNORED_P (addr) = 0;
  t = build_fold_indirect_ref (addr);
  TREE_THIS_NOTRAP (t) = 1;
  SET_DECL_VALUE_EXPR (decl, t);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;

  t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
  t = build_call_expr (t, 2, DECL_SIZE_UNIT (decl),
		       size_int (DECL_ALIGN (decl)));
  /* The call has been built for a variable-sized object.  */
  CALL_ALLOCA_FOR_VAR_P (t) = 1;
  t = fold_convert (ptr_type, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);

  gimplify_and_add (t, seq_p);
}
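
/* Illustration (added commentary, not from the original file): for
   "int a[n];" the effect is roughly

     a.2 = __builtin_alloca_with_align (D.size, align);   <- emitted here
     ...
     (*a.2)[i] = 0;      <- every later use of 'a' goes through *a.2

   because DECL_VALUE_EXPR (a) is set to the indirection "*a.2".  The
   temporary names and the size computation shown are schematic.  */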

/* A helper function to be called via walk_tree.  Mark all labels under *TP
   as being forced.  To be called for DECL_INITIAL of static variables.  */

static tree
force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  if (TYPE_P (*tp))
    *walk_subtrees = 0;
  if (TREE_CODE (*tp) == LABEL_DECL)
    {
      FORCED_LABEL (*tp) = 1;
      cfun->has_forced_label_in_static = 1;
    }

  return NULL_TREE;
}

/* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
   and initialization explicit.  */

static enum gimplify_status
gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
{
  tree stmt = *stmt_p;
  tree decl = DECL_EXPR_DECL (stmt);

  *stmt_p = NULL_TREE;

  if (TREE_TYPE (decl) == error_mark_node)
    return GS_ERROR;

  if ((TREE_CODE (decl) == TYPE_DECL
       || VAR_P (decl))
      && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
    {
      gimplify_type_sizes (TREE_TYPE (decl), seq_p);
      if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
	gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
    }

  /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
     in case its size expressions contain problematic nodes like CALL_EXPR.  */
  if (TREE_CODE (decl) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (decl)
      && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
    {
      gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
      if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
	gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
    }

  if (VAR_P (decl) && !DECL_EXTERNAL (decl))
    {
      tree init = DECL_INITIAL (decl);
      bool is_vla = false;

      if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  || (!TREE_STATIC (decl)
	      && flag_stack_check == GENERIC_STACK_CHECK
	      && compare_tree_int (DECL_SIZE_UNIT (decl),
				   STACK_CHECK_MAX_VAR_SIZE) > 0))
	{
	  gimplify_vla_decl (decl, seq_p);
	  is_vla = true;
	}

      if (asan_poisoned_variables
	  && !is_vla
	  && TREE_ADDRESSABLE (decl)
	  && !TREE_STATIC (decl)
	  && !DECL_HAS_VALUE_EXPR_P (decl)
	  && dbg_cnt (asan_use_after_scope)
	  && !gimplify_omp_ctxp)
	{
	  asan_poisoned_variables->add (decl);
	  asan_poison_variable (decl, false, seq_p);
	  if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
	    gimplify_ctxp->live_switch_vars->add (decl);
	}

      /* Some front ends do not explicitly declare all anonymous
	 artificial variables.  We compensate here by declaring the
	 variables, though it would be better if the front ends would
	 explicitly declare them.  */
      if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
	  && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
	gimple_add_tmp_var (decl);

      if (init && init != error_mark_node)
	{
	  if (!TREE_STATIC (decl))
	    {
	      DECL_INITIAL (decl) = NULL_TREE;
	      init = build2 (INIT_EXPR, void_type_node, decl, init);
	      gimplify_and_add (init, seq_p);
	      ggc_free (init);
	    }
	  else
	    /* We must still examine initializers for static variables
	       as they may contain a label address.  */
	    walk_tree (&init, force_labels_r, NULL, NULL);
	}
    }

  return GS_ALL_DONE;
}

/* Gimplify a LOOP_EXPR.  Normally this just involves gimplifying the body
   and replacing the LOOP_EXPR with goto, but if the loop contains an
   EXIT_EXPR, we need to append a label for it to jump to.  */

static enum gimplify_status
gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree saved_label = gimplify_ctxp->exit_label;
  tree start_label = create_artificial_label (UNKNOWN_LOCATION);

  gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));

  gimplify_ctxp->exit_label = NULL_TREE;

  gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);

  gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));

  if (gimplify_ctxp->exit_label)
    gimplify_seq_add_stmt (pre_p,
			   gimple_build_label (gimplify_ctxp->exit_label));

  gimplify_ctxp->exit_label = saved_label;

  *expr_p = NULL;
  return GS_ALL_DONE;
}
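
/* Illustration (added commentary, not from the original file): a LOOP_EXPR
   lowers to a flat label/goto skeleton.  Schematically,

     LOOP_EXPR { body containing an EXIT_EXPR }

   becomes

     <D.start>:
       ... body ...
       if (exit condition) goto <D.exit>;   <- from the EXIT_EXPR
       goto <D.start>;
     <D.exit>:

   where <D.exit> is only emitted if some EXIT_EXPR in the body set
   gimplify_ctxp->exit_label.  */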

/* Gimplify a statement list onto a sequence.  These may be created either
   by an enlightened front-end, or by shortcut_cond_expr.  */

static enum gimplify_status
gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
{
  tree temp = voidify_wrapper_expr (*expr_p, NULL);

  tree_stmt_iterator i = tsi_start (*expr_p);

  while (!tsi_end_p (i))
    {
      gimplify_stmt (tsi_stmt_ptr (i), pre_p);
      tsi_delink (&i);
    }

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  return GS_ALL_DONE;
}

/* Callback for walk_gimple_seq.  */

static tree
warn_switch_unreachable_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
			   struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    case GIMPLE_TRY:
      /* A compiler-generated cleanup or a user-written try block.
	 If it's empty, don't dive into it--that would result in
	 worse location info.  */
      if (gimple_try_eval (stmt) == NULL)
	{
	  wi->info = stmt;
	  return integer_zero_node;
	}
      /* Fall through.  */
    case GIMPLE_BIND:
    case GIMPLE_CATCH:
    case GIMPLE_EH_FILTER:
    case GIMPLE_TRANSACTION:
      /* Walk the sub-statements.  */
      *handled_ops_p = false;
      break;
    case GIMPLE_CALL:
      if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
	{
	  *handled_ops_p = false;
	  break;
	}
      /* Fall through.  */
    default:
      /* Save the first "real" statement (not a decl/lexical scope/...).  */
      wi->info = stmt;
      return integer_zero_node;
    }
  return NULL_TREE;
}

/* Possibly warn about unreachable statements between switch's controlling
   expression and the first case.  SEQ is the body of a switch expression.  */

static void
maybe_warn_switch_unreachable (gimple_seq seq)
{
  if (!warn_switch_unreachable
      /* This warning doesn't play well with Fortran when optimizations
	 are on.  */
      || lang_GNU_Fortran ()
      || seq == NULL)
    return;

  struct walk_stmt_info wi;
  memset (&wi, 0, sizeof (wi));
  walk_gimple_seq (seq, warn_switch_unreachable_r, NULL, &wi);
  gimple *stmt = (gimple *) wi.info;

  if (stmt && gimple_code (stmt) != GIMPLE_LABEL)
    {
      if (gimple_code (stmt) == GIMPLE_GOTO
	  && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
	  && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
	/* Don't warn for compiler-generated gotos.  These occur
	   in Duff's devices, for example.  */;
      else
	warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
		    "statement will never be executed");
    }
}
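
/* Illustration (added commentary, not from the original file): the code
   -Wswitch-unreachable flags is of the form

     switch (x)
       {
	 foo ();       <- "statement will never be executed"
       case 1:
	 bar ();
       }

   Declarations before the first case label are fine; the walk above skips
   binds/labels and reports only the first statement that can never run.  */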


/* A label entry that pairs label and a location.  */

struct label_entry
{
  tree label;
  location_t loc;
};

/* Find LABEL in vector of label entries VEC.  */

static struct label_entry *
find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
{
  unsigned int i;
  struct label_entry *l;

  FOR_EACH_VEC_ELT (*vec, i, l)
    if (l->label == label)
      return l;
  return NULL;
}

/* Return true if LABEL, a LABEL_DECL, represents a case label
   in a vector of labels CASES.  */

static bool
case_label_p (const vec<tree> *cases, tree label)
{
  unsigned int i;
  tree l;

  FOR_EACH_VEC_ELT (*cases, i, l)
    if (CASE_LABEL (l) == label)
      return true;
  return false;
}

/* Find the last statement in a scope STMT.  */

static gimple *
last_stmt_in_scope (gimple *stmt)
{
  if (!stmt)
    return NULL;

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      {
	gbind *bind = as_a <gbind *> (stmt);
	stmt = gimple_seq_last_stmt (gimple_bind_body (bind));
	return last_stmt_in_scope (stmt);
      }

    case GIMPLE_TRY:
      {
	gtry *try_stmt = as_a <gtry *> (stmt);
	stmt = gimple_seq_last_stmt (gimple_try_eval (try_stmt));
	gimple *last_eval = last_stmt_in_scope (stmt);
	if (gimple_stmt_may_fallthru (last_eval)
	    && (last_eval == NULL
		|| !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
	    && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
	  {
	    stmt = gimple_seq_last_stmt (gimple_try_cleanup (try_stmt));
	    return last_stmt_in_scope (stmt);
	  }
	else
	  return last_eval;
      }

    default:
      return stmt;
    }
}

/* Collect interesting labels in LABELS and return the statement preceding
   another case label, or a user-defined label.  */

static gimple *
collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
			    auto_vec <struct label_entry> *labels)
{
  gimple *prev = NULL;

  do
    {
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
	  || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
	{
	  /* Nested scope.  Only look at the last statement of
	     the innermost scope.  */
	  location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
	  gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
	  if (last)
	    {
	      prev = last;
	      /* It might be a label without a location.  Use the
		 location of the scope then.  */
	      if (!gimple_has_location (prev))
		gimple_set_location (prev, bind_loc);
	    }
	  gsi_next (gsi_p);
	  continue;
	}

      /* Ifs are tricky.  */
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
	{
	  gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
	  tree false_lab = gimple_cond_false_label (cond_stmt);
	  location_t if_loc = gimple_location (cond_stmt);

	  /* If we have e.g.
	       if (i > 1) goto <D.2259>; else goto D;
	     we can't do much with the else-branch.  */
	  if (!DECL_ARTIFICIAL (false_lab))
	    break;

	  /* Go on until the false label, then one step back.  */
	  for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
	    {
	      gimple *stmt = gsi_stmt (*gsi_p);
	      if (gimple_code (stmt) == GIMPLE_LABEL
		  && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
		break;
	    }

	  /* Not found?  Oops.  */
	  if (gsi_end_p (*gsi_p))
	    break;

	  struct label_entry l = { false_lab, if_loc };
	  labels->safe_push (l);

	  /* Go to the last statement of the then branch.  */
	  gsi_prev (gsi_p);

	  /* if (i != 0) goto <D.1759>; else goto <D.1760>;
	     <D.1759>:
	     <stmt>;
	     goto <D.1761>;
	     <D.1760>:
	   */
	  if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
	      && !gimple_has_location (gsi_stmt (*gsi_p)))
	    {
	      /* Look at the statement before, it might be
		 attribute fallthrough, in which case don't warn.  */
	      gsi_prev (gsi_p);
	      bool fallthru_before_dest
		= gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
	      gsi_next (gsi_p);
	      tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
	      if (!fallthru_before_dest)
		{
		  struct label_entry l = { goto_dest, if_loc };
		  labels->safe_push (l);
		}
	    }
	  /* And move back.  */
	  gsi_next (gsi_p);
	}

      /* Remember the last statement.  Skip labels that are of no interest
	 to us.  */
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
	{
	  tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
	  if (find_label_entry (labels, label))
	    prev = gsi_stmt (*gsi_p);
	}
      else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
	;
      else
	prev = gsi_stmt (*gsi_p);
      gsi_next (gsi_p);
    }
  while (!gsi_end_p (*gsi_p)
	 /* Stop if we find a case or a user-defined label.  */
	 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
	     || !gimple_has_location (gsi_stmt (*gsi_p))));

  return prev;
}
*/ 1969 gsi_next (gsi_p); 1970 } 1971 1972 /* Remember the last statement. Skip labels that are of no interest 1973 to us. */ 1974 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL) 1975 { 1976 tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p))); 1977 if (find_label_entry (labels, label)) 1978 prev = gsi_stmt (*gsi_p); 1979 } 1980 else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK)) 1981 ; 1982 else 1983 prev = gsi_stmt (*gsi_p); 1984 gsi_next (gsi_p); 1985 } 1986 while (!gsi_end_p (*gsi_p) 1987 /* Stop if we find a case or a user-defined label. */ 1988 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL 1989 || !gimple_has_location (gsi_stmt (*gsi_p)))); 1990 1991 return prev; 1992 } 1993 1994 /* Return true if the switch fallthrough warning should occur. LABEL is 1995 the label statement that we're falling through to. */ 1996 1997 static bool 1998 should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label) 1999 { 2000 gimple_stmt_iterator gsi = *gsi_p; 2001 2002 /* Don't warn if the label is marked with a "falls through" comment. */ 2003 if (FALLTHROUGH_LABEL_P (label)) 2004 return false; 2005 2006 /* Don't warn for non-case labels followed by a statement: 2007 case 0: 2008 foo (); 2009 label: 2010 bar (); 2011 as these are likely intentional. */ 2012 if (!case_label_p (&gimplify_ctxp->case_labels, label)) 2013 { 2014 tree l; 2015 while (!gsi_end_p (gsi) 2016 && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL 2017 && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi)))) 2018 && !case_label_p (&gimplify_ctxp->case_labels, l)) 2019 gsi_next (&gsi); 2020 if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL) 2021 return false; 2022 } 2023 2024 /* Don't warn for terminated branches, i.e. when the code after the 2025 subsequent case labels immediately breaks. */ 2026 gsi = *gsi_p; 2027 2028 /* Skip all immediately following labels. */ 2029 while (!gsi_end_p (gsi) && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL) 2030 gsi_next (&gsi); 2031 2032 /* { ... something; default:; } */ 2033 if (gsi_end_p (gsi) 2034 /* { ... something; default: break; } or 2035 { ... something; default: goto L; } */ 2036 || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO 2037 /* { ... something; default: return; } */ 2038 || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN) 2039 return false; 2040 2041 return true; 2042 } 2043 2044 /* Callback for walk_gimple_seq. */ 2045 2046 static tree 2047 warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p, 2048 struct walk_stmt_info *) 2049 { 2050 gimple *stmt = gsi_stmt (*gsi_p); 2051 2052 *handled_ops_p = true; 2053 switch (gimple_code (stmt)) 2054 { 2055 case GIMPLE_TRY: 2056 case GIMPLE_BIND: 2057 case GIMPLE_CATCH: 2058 case GIMPLE_EH_FILTER: 2059 case GIMPLE_TRANSACTION: 2060 /* Walk the sub-statements. */ 2061 *handled_ops_p = false; 2062 break; 2063 2064 /* Find a sequence of form: 2065 2066 GIMPLE_LABEL 2067 [...] 2068 <may fallthru stmt> 2069 GIMPLE_LABEL 2070 2071 and possibly warn. */ 2072 case GIMPLE_LABEL: 2073 { 2074 /* Found a label. Skip all immediately following labels. */ 2075 while (!gsi_end_p (*gsi_p) 2076 && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL) 2077 gsi_next (gsi_p); 2078 2079 /* There might be no more statements. */ 2080 if (gsi_end_p (*gsi_p)) 2081 return integer_zero_node; 2082 2083 /* Vector of labels that fall through. 
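Each entry pairs a label with a source location, so that a later diagnostic can point at the line where the fall through actually happens; see struct label_entry above.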
*/ 2084 auto_vec <struct label_entry> labels; 2085 gimple *prev = collect_fallthrough_labels (gsi_p, &labels); 2086 2087 /* There might be no more statements. */ 2088 if (gsi_end_p (*gsi_p)) 2089 return integer_zero_node; 2090 2091 gimple *next = gsi_stmt (*gsi_p); 2092 tree label; 2093 /* If what follows is a label, then we may have a fallthrough. */ 2094 if (gimple_code (next) == GIMPLE_LABEL 2095 && gimple_has_location (next) 2096 && (label = gimple_label_label (as_a <glabel *> (next))) 2097 && prev != NULL) 2098 { 2099 struct label_entry *l; 2100 bool warned_p = false; 2101 if (!should_warn_for_implicit_fallthrough (gsi_p, label)) 2102 /* Quiet. */; 2103 else if (gimple_code (prev) == GIMPLE_LABEL 2104 && (label = gimple_label_label (as_a <glabel *> (prev))) 2105 && (l = find_label_entry (&labels, label))) 2106 warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_, 2107 "this statement may fall through"); 2108 else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH) 2109 /* Try to be clever and don't warn when the statement 2110 can't actually fall through. */ 2111 && gimple_stmt_may_fallthru (prev) 2112 && gimple_has_location (prev)) 2113 warned_p = warning_at (gimple_location (prev), 2114 OPT_Wimplicit_fallthrough_, 2115 "this statement may fall through"); 2116 if (warned_p) 2117 inform (gimple_location (next), "here"); 2118 2119 /* Mark this label as processed so as to prevent multiple 2120 warnings in nested switches. */ 2121 FALLTHROUGH_LABEL_P (label) = true; 2122 2123 /* So that next warn_implicit_fallthrough_r will start looking for 2124 a new sequence starting with this label. */ 2125 gsi_prev (gsi_p); 2126 } 2127 } 2128 break; 2129 default: 2130 break; 2131 } 2132 return NULL_TREE; 2133 } 2134 2135 /* Warn when a switch case falls through. */ 2136 2137 static void 2138 maybe_warn_implicit_fallthrough (gimple_seq seq) 2139 { 2140 if (!warn_implicit_fallthrough) 2141 return; 2142 2143 /* This warning is meant for C/C++/ObjC/ObjC++ only. */ 2144 if (!(lang_GNU_C () 2145 || lang_GNU_CXX () 2146 || lang_GNU_OBJC ())) 2147 return; 2148 2149 struct walk_stmt_info wi; 2150 memset (&wi, 0, sizeof (wi)); 2151 walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi); 2152 } 2153 2154 /* Callback for walk_gimple_seq. */ 2155 2156 static tree 2157 expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p, 2158 struct walk_stmt_info *) 2159 { 2160 gimple *stmt = gsi_stmt (*gsi_p); 2161 2162 *handled_ops_p = true; 2163 switch (gimple_code (stmt)) 2164 { 2165 case GIMPLE_TRY: 2166 case GIMPLE_BIND: 2167 case GIMPLE_CATCH: 2168 case GIMPLE_EH_FILTER: 2169 case GIMPLE_TRANSACTION: 2170 /* Walk the sub-statements. */ 2171 *handled_ops_p = false; 2172 break; 2173 case GIMPLE_CALL: 2174 if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH)) 2175 { 2176 gsi_remove (gsi_p, true); 2177 if (gsi_end_p (*gsi_p)) 2178 return integer_zero_node; 2179 2180 bool found = false; 2181 location_t loc = gimple_location (stmt); 2182 2183 gimple_stmt_iterator gsi2 = *gsi_p; 2184 stmt = gsi_stmt (gsi2); 2185 if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt)) 2186 { 2187 /* Go on until the artificial label. */ 2188 tree goto_dest = gimple_goto_dest (stmt); 2189 for (; !gsi_end_p (gsi2); gsi_next (&gsi2)) 2190 { 2191 if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL 2192 && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2))) 2193 == goto_dest) 2194 break; 2195 } 2196 2197 /* Not found? Stop. */ 2198 if (gsi_end_p (gsi2)) 2199 break; 2200 2201 /* Look one past it. 
*/ 2202 gsi_next (&gsi2); 2203 } 2204 2205 /* We're looking for a case label or default label here. */ 2206 while (!gsi_end_p (gsi2)) 2207 { 2208 stmt = gsi_stmt (gsi2); 2209 if (gimple_code (stmt) == GIMPLE_LABEL) 2210 { 2211 tree label = gimple_label_label (as_a <glabel *> (stmt)); 2212 if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label)) 2213 { 2214 found = true; 2215 break; 2216 } 2217 } 2218 else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK)) 2219 ; 2220 else 2221 /* Anything else is not expected. */ 2222 break; 2223 gsi_next (&gsi2); 2224 } 2225 if (!found) 2226 warning_at (loc, 0, "attribute %<fallthrough%> not preceding " 2227 "a case label or default label"); 2228 } 2229 break; 2230 default: 2231 break; 2232 } 2233 return NULL_TREE; 2234 } 2235 2236 /* Expand all FALLTHROUGH () calls in SEQ. */ 2237 2238 static void 2239 expand_FALLTHROUGH (gimple_seq *seq_p) 2240 { 2241 struct walk_stmt_info wi; 2242 memset (&wi, 0, sizeof (wi)); 2243 walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi); 2244 } 2245 2246 2247 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can 2248 branch to. */ 2249 2250 static enum gimplify_status 2251 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p) 2252 { 2253 tree switch_expr = *expr_p; 2254 gimple_seq switch_body_seq = NULL; 2255 enum gimplify_status ret; 2256 tree index_type = TREE_TYPE (switch_expr); 2257 if (index_type == NULL_TREE) 2258 index_type = TREE_TYPE (SWITCH_COND (switch_expr)); 2259 2260 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val, 2261 fb_rvalue); 2262 if (ret == GS_ERROR || ret == GS_UNHANDLED) 2263 return ret; 2264 2265 if (SWITCH_BODY (switch_expr)) 2266 { 2267 vec<tree> labels; 2268 vec<tree> saved_labels; 2269 hash_set<tree> *saved_live_switch_vars = NULL; 2270 tree default_case = NULL_TREE; 2271 gswitch *switch_stmt; 2272 2273 /* If someone can be bothered to fill in the labels, they can 2274 be bothered to null out the body too. */ 2275 gcc_assert (!SWITCH_LABELS (switch_expr)); 2276 2277 /* Save old labels, get new ones from body, then restore the old 2278 labels. Save all the things from the switch body to append after. */ 2279 saved_labels = gimplify_ctxp->case_labels; 2280 gimplify_ctxp->case_labels.create (8); 2281 2282 /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR or a STATEMENT_LIST. */ 2283 saved_live_switch_vars = gimplify_ctxp->live_switch_vars; 2284 tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr)); 2285 if (body_type == BIND_EXPR || body_type == STATEMENT_LIST) 2286 gimplify_ctxp->live_switch_vars = new hash_set<tree> (4); 2287 else 2288 gimplify_ctxp->live_switch_vars = NULL; 2289 2290 bool old_in_switch_expr = gimplify_ctxp->in_switch_expr; 2291 gimplify_ctxp->in_switch_expr = true; 2292 2293 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq); 2294 2295 gimplify_ctxp->in_switch_expr = old_in_switch_expr; 2296 maybe_warn_switch_unreachable (switch_body_seq); 2297 maybe_warn_implicit_fallthrough (switch_body_seq); 2298 /* Only do this for the outermost GIMPLE_SWITCH. 
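A nested switch is gimplified as part of the enclosing switch's body, so gimplify_ctxp->in_switch_expr is still set when the inner one finishes; expanding FALLTHROUGH () once at the outermost level therefore covers the whole nested sequence.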
*/ 2299 if (!gimplify_ctxp->in_switch_expr) 2300 expand_FALLTHROUGH (&switch_body_seq); 2301 2302 labels = gimplify_ctxp->case_labels; 2303 gimplify_ctxp->case_labels = saved_labels; 2304 2305 if (gimplify_ctxp->live_switch_vars) 2306 { 2307 gcc_assert (gimplify_ctxp->live_switch_vars->elements () == 0); 2308 delete gimplify_ctxp->live_switch_vars; 2309 } 2310 gimplify_ctxp->live_switch_vars = saved_live_switch_vars; 2311 2312 preprocess_case_label_vec_for_gimple (labels, index_type, 2313 &default_case); 2314 2315 if (!default_case) 2316 { 2317 glabel *new_default; 2318 2319 default_case 2320 = build_case_label (NULL_TREE, NULL_TREE, 2321 create_artificial_label (UNKNOWN_LOCATION)); 2322 new_default = gimple_build_label (CASE_LABEL (default_case)); 2323 gimplify_seq_add_stmt (&switch_body_seq, new_default); 2324 } 2325 2326 switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr), 2327 default_case, labels); 2328 gimplify_seq_add_stmt (pre_p, switch_stmt); 2329 gimplify_seq_add_seq (pre_p, switch_body_seq); 2330 labels.release (); 2331 } 2332 else 2333 gcc_assert (SWITCH_LABELS (switch_expr)); 2334 2335 return GS_ALL_DONE; 2336 } 2337 2338 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */ 2339 2340 static enum gimplify_status 2341 gimplify_label_expr (tree *expr_p, gimple_seq *pre_p) 2342 { 2343 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p)) 2344 == current_function_decl); 2345 2346 glabel *label_stmt = gimple_build_label (LABEL_EXPR_LABEL (*expr_p)); 2347 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p)); 2348 gimplify_seq_add_stmt (pre_p, label_stmt); 2349 2350 return GS_ALL_DONE; 2351 } 2352 2353 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */ 2354 2355 static enum gimplify_status 2356 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p) 2357 { 2358 struct gimplify_ctx *ctxp; 2359 glabel *label_stmt; 2360 2361 /* Invalid programs can play Duff's Device type games with, for example, 2362 #pragma omp parallel. At least in the C front end, we don't 2363 detect such invalid branches until after gimplification, in the 2364 diagnose_omp_blocks pass. */ 2365 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context) 2366 if (ctxp->case_labels.exists ()) 2367 break; 2368 2369 label_stmt = gimple_build_label (CASE_LABEL (*expr_p)); 2370 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p)); 2371 ctxp->case_labels.safe_push (*expr_p); 2372 gimplify_seq_add_stmt (pre_p, label_stmt); 2373 2374 return GS_ALL_DONE; 2375 } 2376 2377 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first 2378 if necessary. */ 2379 2380 tree 2381 build_and_jump (tree *label_p) 2382 { 2383 if (label_p == NULL) 2384 /* If there's nowhere to jump, just fall through. */ 2385 return NULL_TREE; 2386 2387 if (*label_p == NULL_TREE) 2388 { 2389 tree label = create_artificial_label (UNKNOWN_LOCATION); 2390 *label_p = label; 2391 } 2392 2393 return build1 (GOTO_EXPR, void_type_node, *label_p); 2394 } 2395 2396 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR. 2397 This also involves building a label to jump to and communicating it to 2398 gimplify_loop_expr through gimplify_ctxp->exit_label. 
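The net effect, roughly sketched, is that EXIT_EXPR <cond> becomes if (cond) goto <exit_label>; with the label itself emitted once the enclosing LOOP_EXPR is gimplified.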
*/ 2399 2400 static enum gimplify_status 2401 gimplify_exit_expr (tree *expr_p) 2402 { 2403 tree cond = TREE_OPERAND (*expr_p, 0); 2404 tree expr; 2405 2406 expr = build_and_jump (&gimplify_ctxp->exit_label); 2407 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE); 2408 *expr_p = expr; 2409 2410 return GS_OK; 2411 } 2412 2413 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is 2414 different from its canonical type, wrap the whole thing inside a 2415 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical 2416 type. 2417 2418 The canonical type of a COMPONENT_REF is the type of the field being 2419 referenced--unless the field is a bit-field which can be read directly 2420 in a smaller mode, in which case the canonical type is the 2421 sign-appropriate type corresponding to that mode. */ 2422 2423 static void 2424 canonicalize_component_ref (tree *expr_p) 2425 { 2426 tree expr = *expr_p; 2427 tree type; 2428 2429 gcc_assert (TREE_CODE (expr) == COMPONENT_REF); 2430 2431 if (INTEGRAL_TYPE_P (TREE_TYPE (expr))) 2432 type = TREE_TYPE (get_unwidened (expr, NULL_TREE)); 2433 else 2434 type = TREE_TYPE (TREE_OPERAND (expr, 1)); 2435 2436 /* One could argue that all the stuff below is not necessary for 2437 the non-bitfield case and declare it a FE error if type 2438 adjustment would be needed. */ 2439 if (TREE_TYPE (expr) != type) 2440 { 2441 #ifdef ENABLE_TYPES_CHECKING 2442 tree old_type = TREE_TYPE (expr); 2443 #endif 2444 int type_quals; 2445 2446 /* We need to preserve qualifiers and propagate them from 2447 operand 0. */ 2448 type_quals = TYPE_QUALS (type) 2449 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0))); 2450 if (TYPE_QUALS (type) != type_quals) 2451 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals); 2452 2453 /* Set the type of the COMPONENT_REF to the underlying type. */ 2454 TREE_TYPE (expr) = type; 2455 2456 #ifdef ENABLE_TYPES_CHECKING 2457 /* It is now a FE error, if the conversion from the canonical 2458 type to the original expression type is not useless. */ 2459 gcc_assert (useless_type_conversion_p (old_type, type)); 2460 #endif 2461 } 2462 } 2463 2464 /* If a NOP conversion is changing a pointer to array of foo to a pointer 2465 to foo, embed that change in the ADDR_EXPR by converting 2466 T array[U]; 2467 (T *)&array 2468 ==> 2469 &array[L] 2470 where L is the lower bound. For simplicity, only do this for constant 2471 lower bound. 2472 The constraint is that the type of &array[L] is trivially convertible 2473 to T *. */ 2474 2475 static void 2476 canonicalize_addr_expr (tree *expr_p) 2477 { 2478 tree expr = *expr_p; 2479 tree addr_expr = TREE_OPERAND (expr, 0); 2480 tree datype, ddatype, pddatype; 2481 2482 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */ 2483 if (!POINTER_TYPE_P (TREE_TYPE (expr)) 2484 || TREE_CODE (addr_expr) != ADDR_EXPR) 2485 return; 2486 2487 /* The addr_expr type should be a pointer to an array. */ 2488 datype = TREE_TYPE (TREE_TYPE (addr_expr)); 2489 if (TREE_CODE (datype) != ARRAY_TYPE) 2490 return; 2491 2492 /* The pointer to element type shall be trivially convertible to 2493 the expression pointer type. */ 2494 ddatype = TREE_TYPE (datype); 2495 pddatype = build_pointer_type (ddatype); 2496 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)), 2497 pddatype)) 2498 return; 2499 2500 /* The lower bound and element sizes must be constant. 
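Variable bounds or sizes would require emitting statements to compute them, which this purely expression-level rewrite cannot do, so we just give up in that case.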
*/ 2501 if (!TYPE_SIZE_UNIT (ddatype) 2502 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST 2503 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype)) 2504 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST) 2505 return; 2506 2507 /* All checks succeeded. Build a new node to merge the cast. */ 2508 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0), 2509 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)), 2510 NULL_TREE, NULL_TREE); 2511 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p); 2512 2513 /* We can have stripped a required restrict qualifier above. */ 2514 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p))) 2515 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p); 2516 } 2517 2518 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions 2519 underneath as appropriate. */ 2520 2521 static enum gimplify_status 2522 gimplify_conversion (tree *expr_p) 2523 { 2524 location_t loc = EXPR_LOCATION (*expr_p); 2525 gcc_assert (CONVERT_EXPR_P (*expr_p)); 2526 2527 /* Then strip away all but the outermost conversion. */ 2528 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0)); 2529 2530 /* And remove the outermost conversion if it's useless. */ 2531 if (tree_ssa_useless_type_conversion (*expr_p)) 2532 *expr_p = TREE_OPERAND (*expr_p, 0); 2533 2534 /* If we still have a conversion at the toplevel, 2535 then canonicalize some constructs. */ 2536 if (CONVERT_EXPR_P (*expr_p)) 2537 { 2538 tree sub = TREE_OPERAND (*expr_p, 0); 2539 2540 /* If a NOP conversion is changing the type of a COMPONENT_REF 2541 expression, then canonicalize its type now in order to expose more 2542 redundant conversions. */ 2543 if (TREE_CODE (sub) == COMPONENT_REF) 2544 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0)); 2545 2546 /* If a NOP conversion is changing a pointer to array of foo 2547 to a pointer to foo, embed that change in the ADDR_EXPR. */ 2548 else if (TREE_CODE (sub) == ADDR_EXPR) 2549 canonicalize_addr_expr (expr_p); 2550 } 2551 2552 /* If we have a conversion to a non-register type force the 2553 use of a VIEW_CONVERT_EXPR instead. */ 2554 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p))) 2555 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p), 2556 TREE_OPERAND (*expr_p, 0)); 2557 2558 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */ 2559 if (TREE_CODE (*expr_p) == CONVERT_EXPR) 2560 TREE_SET_CODE (*expr_p, NOP_EXPR); 2561 2562 return GS_OK; 2563 } 2564 2565 /* Nonlocal VLAs seen in the current function. */ 2566 static hash_set<tree> *nonlocal_vlas; 2567 2568 /* The VAR_DECLs created for nonlocal VLAs for debug info purposes. */ 2569 static tree nonlocal_vla_vars; 2570 2571 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a 2572 DECL_VALUE_EXPR, and it's worth re-examining things. */ 2573 2574 static enum gimplify_status 2575 gimplify_var_or_parm_decl (tree *expr_p) 2576 { 2577 tree decl = *expr_p; 2578 2579 /* ??? If this is a local variable, and it has not been seen in any 2580 outer BIND_EXPR, then it's probably the result of a duplicate 2581 declaration, for which we've already issued an error. It would 2582 be really nice if the front end wouldn't leak these at all. 2583 Currently the only known culprit is C++ destructors, as seen 2584 in g++.old-deja/g++.jason/binding.C. 
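In that case we merely assert that an error has already been reported and bail out with GS_ERROR below.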
*/ 2585 if (VAR_P (decl) 2586 && !DECL_SEEN_IN_BIND_EXPR_P (decl) 2587 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl) 2588 && decl_function_context (decl) == current_function_decl) 2589 { 2590 gcc_assert (seen_error ()); 2591 return GS_ERROR; 2592 } 2593 2594 /* When within an OMP context, notice uses of variables. */ 2595 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true)) 2596 return GS_ALL_DONE; 2597 2598 /* If the decl is an alias for another expression, substitute it now. */ 2599 if (DECL_HAS_VALUE_EXPR_P (decl)) 2600 { 2601 tree value_expr = DECL_VALUE_EXPR (decl); 2602 2603 /* For referenced nonlocal VLAs add a decl for debugging purposes 2604 to the current function. */ 2605 if (VAR_P (decl) 2606 && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST 2607 && nonlocal_vlas != NULL 2608 && TREE_CODE (value_expr) == INDIRECT_REF 2609 && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL 2610 && decl_function_context (decl) != current_function_decl) 2611 { 2612 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; 2613 while (ctx 2614 && (ctx->region_type == ORT_WORKSHARE 2615 || ctx->region_type == ORT_SIMD 2616 || ctx->region_type == ORT_ACC)) 2617 ctx = ctx->outer_context; 2618 if (!ctx && !nonlocal_vlas->add (decl)) 2619 { 2620 tree copy = copy_node (decl); 2621 2622 lang_hooks.dup_lang_specific_decl (copy); 2623 SET_DECL_RTL (copy, 0); 2624 TREE_USED (copy) = 1; 2625 DECL_CHAIN (copy) = nonlocal_vla_vars; 2626 nonlocal_vla_vars = copy; 2627 SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr)); 2628 DECL_HAS_VALUE_EXPR_P (copy) = 1; 2629 } 2630 } 2631 2632 *expr_p = unshare_expr (value_expr); 2633 return GS_OK; 2634 } 2635 2636 return GS_ALL_DONE; 2637 } 2638 2639 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */ 2640 2641 static void 2642 recalculate_side_effects (tree t) 2643 { 2644 enum tree_code code = TREE_CODE (t); 2645 int len = TREE_OPERAND_LENGTH (t); 2646 int i; 2647 2648 switch (TREE_CODE_CLASS (code)) 2649 { 2650 case tcc_expression: 2651 switch (code) 2652 { 2653 case INIT_EXPR: 2654 case MODIFY_EXPR: 2655 case VA_ARG_EXPR: 2656 case PREDECREMENT_EXPR: 2657 case PREINCREMENT_EXPR: 2658 case POSTDECREMENT_EXPR: 2659 case POSTINCREMENT_EXPR: 2660 /* All of these have side-effects, no matter what their 2661 operands are. */ 2662 return; 2663 2664 default: 2665 break; 2666 } 2667 /* Fall through. */ 2668 2669 case tcc_comparison: /* a comparison expression */ 2670 case tcc_unary: /* a unary arithmetic expression */ 2671 case tcc_binary: /* a binary arithmetic expression */ 2672 case tcc_reference: /* a reference */ 2673 case tcc_vl_exp: /* a function call */ 2674 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t); 2675 for (i = 0; i < len; ++i) 2676 { 2677 tree op = TREE_OPERAND (t, i); 2678 if (op && TREE_SIDE_EFFECTS (op)) 2679 TREE_SIDE_EFFECTS (t) = 1; 2680 } 2681 break; 2682 2683 case tcc_constant: 2684 /* No side-effects. */ 2685 return; 2686 2687 default: 2688 gcc_unreachable (); 2689 } 2690 } 2691 2692 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR 2693 node *EXPR_P. 2694 2695 compound_lval 2696 : min_lval '[' val ']' 2697 | min_lval '.' ID 2698 | compound_lval '[' val ']' 2699 | compound_lval '.' ID 2700 2701 This is not part of the original SIMPLE definition, which separates 2702 array and member references, but it seems reasonable to handle them 2703 together. 
Also, this way we don't run into problems with union 2704 aliasing; gcc requires that for accesses through a union to alias, the 2705 union reference must be explicit, which was not always the case when we 2706 were splitting up array and member refs. 2707 2708 PRE_P points to the sequence where side effects that must happen before 2709 *EXPR_P should be stored. 2710 2711 POST_P points to the sequence where side effects that must happen after 2712 *EXPR_P should be stored. */ 2713 2714 static enum gimplify_status 2715 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p, 2716 fallback_t fallback) 2717 { 2718 tree *p; 2719 enum gimplify_status ret = GS_ALL_DONE, tret; 2720 int i; 2721 location_t loc = EXPR_LOCATION (*expr_p); 2722 tree expr = *expr_p; 2723 2724 /* Create a stack of the subexpressions so later we can walk them in 2725 order from inner to outer. */ 2726 auto_vec<tree, 10> expr_stack; 2727 2728 /* We can handle anything that get_inner_reference can deal with. */ 2729 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0)) 2730 { 2731 restart: 2732 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */ 2733 if (TREE_CODE (*p) == INDIRECT_REF) 2734 *p = fold_indirect_ref_loc (loc, *p); 2735 2736 if (handled_component_p (*p)) 2737 ; 2738 /* Expand DECL_VALUE_EXPR now. In some cases that may expose 2739 additional COMPONENT_REFs. */ 2740 else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL) 2741 && gimplify_var_or_parm_decl (p) == GS_OK) 2742 goto restart; 2743 else 2744 break; 2745 2746 expr_stack.safe_push (*p); 2747 } 2748 2749 gcc_assert (expr_stack.length ()); 2750 2751 /* Now EXPR_STACK is a stack of pointers to all the refs we've 2752 walked through and P points to the innermost expression. 2753 2754 Java requires that we elaborate nodes in source order. That 2755 means we must gimplify the inner expression followed by each of 2756 the indices, in order. But we can't gimplify the inner 2757 expression until we deal with any variable bounds, sizes, or 2758 positions in order to deal with PLACEHOLDER_EXPRs. 2759 2760 So we do this in three steps. First we deal with the annotations 2761 for any variables in the components, then we gimplify the base, 2762 then we gimplify any indices, from left to right. */ 2763 for (i = expr_stack.length () - 1; i >= 0; i--) 2764 { 2765 tree t = expr_stack[i]; 2766 2767 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF) 2768 { 2769 /* Gimplify the low bound and element type size and put them into 2770 the ARRAY_REF. If these values are set, they have already been 2771 gimplified. */ 2772 if (TREE_OPERAND (t, 2) == NULL_TREE) 2773 { 2774 tree low = unshare_expr (array_ref_low_bound (t)); 2775 if (!is_gimple_min_invariant (low)) 2776 { 2777 TREE_OPERAND (t, 2) = low; 2778 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, 2779 post_p, is_gimple_reg, 2780 fb_rvalue); 2781 ret = MIN (ret, tret); 2782 } 2783 } 2784 else 2785 { 2786 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p, 2787 is_gimple_reg, fb_rvalue); 2788 ret = MIN (ret, tret); 2789 } 2790 2791 if (TREE_OPERAND (t, 3) == NULL_TREE) 2792 { 2793 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0))); 2794 tree elmt_size = unshare_expr (array_ref_element_size (t)); 2795 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type)); 2796 2797 /* Divide the element size by the alignment of the element 2798 type (above). 
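Operand 3 of the ARRAY_REF thus stores the element size in units of that alignment; e.g. a 16-byte element type with 8-byte alignment would be recorded as 2 (an illustrative figure).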
*/ 2799 elmt_size 2800 = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor); 2801 2802 if (!is_gimple_min_invariant (elmt_size)) 2803 { 2804 TREE_OPERAND (t, 3) = elmt_size; 2805 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, 2806 post_p, is_gimple_reg, 2807 fb_rvalue); 2808 ret = MIN (ret, tret); 2809 } 2810 } 2811 else 2812 { 2813 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p, 2814 is_gimple_reg, fb_rvalue); 2815 ret = MIN (ret, tret); 2816 } 2817 } 2818 else if (TREE_CODE (t) == COMPONENT_REF) 2819 { 2820 /* Set the field offset into T and gimplify it. */ 2821 if (TREE_OPERAND (t, 2) == NULL_TREE) 2822 { 2823 tree offset = unshare_expr (component_ref_field_offset (t)); 2824 tree field = TREE_OPERAND (t, 1); 2825 tree factor 2826 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT); 2827 2828 /* Divide the offset by its alignment. */ 2829 offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor); 2830 2831 if (!is_gimple_min_invariant (offset)) 2832 { 2833 TREE_OPERAND (t, 2) = offset; 2834 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, 2835 post_p, is_gimple_reg, 2836 fb_rvalue); 2837 ret = MIN (ret, tret); 2838 } 2839 } 2840 else 2841 { 2842 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p, 2843 is_gimple_reg, fb_rvalue); 2844 ret = MIN (ret, tret); 2845 } 2846 } 2847 } 2848 2849 /* Step 2 is to gimplify the base expression. Make sure lvalue is set 2850 so as to match the min_lval predicate. Failure to do so may result 2851 in the creation of large aggregate temporaries. */ 2852 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval, 2853 fallback | fb_lvalue); 2854 ret = MIN (ret, tret); 2855 2856 /* And finally, the indices and operands of ARRAY_REF. During this 2857 loop we also remove any useless conversions. */ 2858 for (; expr_stack.length () > 0; ) 2859 { 2860 tree t = expr_stack.pop (); 2861 2862 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF) 2863 { 2864 /* Gimplify the dimension. */ 2865 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1))) 2866 { 2867 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p, 2868 is_gimple_val, fb_rvalue); 2869 ret = MIN (ret, tret); 2870 } 2871 } 2872 2873 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0)); 2874 2875 /* The innermost expression P may have originally had 2876 TREE_SIDE_EFFECTS set which would have caused all the outer 2877 expressions in *EXPR_P leading to P to also have had 2878 TREE_SIDE_EFFECTS set. */ 2879 recalculate_side_effects (t); 2880 } 2881 2882 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */ 2883 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF) 2884 { 2885 canonicalize_component_ref (expr_p); 2886 } 2887 2888 expr_stack.release (); 2889 2890 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE); 2891 2892 return ret; 2893 } 2894 2895 /* Gimplify the self modifying expression pointed to by EXPR_P 2896 (++, --, +=, -=). 2897 2898 PRE_P points to the list where side effects that must happen before 2899 *EXPR_P should be stored. 2900 2901 POST_P points to the list where side effects that must happen after 2902 *EXPR_P should be stored. 2903 2904 WANT_VALUE is nonzero iff we want to use the value of this expression 2905 in another expression. 2906 2907 ARITH_TYPE is the type the computation should be performed in. 
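Conceptually, the code below builds lhs = (TYPE) ((ARITH_TYPE) lhs +/- (ARITH_TYPE) rhs); for the prefix forms; the postfix forms first save the old value of the LHS in a temporary and return that, and pointer arithmetic is expressed with POINTER_PLUS_EXPR instead. (A sketch only; see the code below.)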
*/ 2908 2909 enum gimplify_status 2910 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p, 2911 bool want_value, tree arith_type) 2912 { 2913 enum tree_code code; 2914 tree lhs, lvalue, rhs, t1; 2915 gimple_seq post = NULL, *orig_post_p = post_p; 2916 bool postfix; 2917 enum tree_code arith_code; 2918 enum gimplify_status ret; 2919 location_t loc = EXPR_LOCATION (*expr_p); 2920 2921 code = TREE_CODE (*expr_p); 2922 2923 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR 2924 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR); 2925 2926 /* Prefix or postfix? */ 2927 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR) 2928 /* Faster to treat as prefix if result is not used. */ 2929 postfix = want_value; 2930 else 2931 postfix = false; 2932 2933 /* For postfix, make sure the inner expression's post side effects 2934 are executed after side effects from this expression. */ 2935 if (postfix) 2936 post_p = &post; 2937 2938 /* Add or subtract? */ 2939 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR) 2940 arith_code = PLUS_EXPR; 2941 else 2942 arith_code = MINUS_EXPR; 2943 2944 /* Gimplify the LHS into a GIMPLE lvalue. */ 2945 lvalue = TREE_OPERAND (*expr_p, 0); 2946 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue); 2947 if (ret == GS_ERROR) 2948 return ret; 2949 2950 /* Extract the operands to the arithmetic operation. */ 2951 lhs = lvalue; 2952 rhs = TREE_OPERAND (*expr_p, 1); 2953 2954 /* For postfix operator, we evaluate the LHS to an rvalue and then use 2955 that as the result value and in the postqueue operation. */ 2956 if (postfix) 2957 { 2958 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue); 2959 if (ret == GS_ERROR) 2960 return ret; 2961 2962 lhs = get_initialized_tmp_var (lhs, pre_p, NULL); 2963 } 2964 2965 /* For POINTERs increment, use POINTER_PLUS_EXPR. */ 2966 if (POINTER_TYPE_P (TREE_TYPE (lhs))) 2967 { 2968 rhs = convert_to_ptrofftype_loc (loc, rhs); 2969 if (arith_code == MINUS_EXPR) 2970 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs); 2971 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs); 2972 } 2973 else 2974 t1 = fold_convert (TREE_TYPE (*expr_p), 2975 fold_build2 (arith_code, arith_type, 2976 fold_convert (arith_type, lhs), 2977 fold_convert (arith_type, rhs))); 2978 2979 if (postfix) 2980 { 2981 gimplify_assign (lvalue, t1, pre_p); 2982 gimplify_seq_add_seq (orig_post_p, post); 2983 *expr_p = lhs; 2984 return GS_ALL_DONE; 2985 } 2986 else 2987 { 2988 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1); 2989 return GS_OK; 2990 } 2991 } 2992 2993 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */ 2994 2995 static void 2996 maybe_with_size_expr (tree *expr_p) 2997 { 2998 tree expr = *expr_p; 2999 tree type = TREE_TYPE (expr); 3000 tree size; 3001 3002 /* If we've already wrapped this or the type is error_mark_node, we can't do 3003 anything. */ 3004 if (TREE_CODE (expr) == WITH_SIZE_EXPR 3005 || type == error_mark_node) 3006 return; 3007 3008 /* If the size isn't known or is a constant, we have nothing to do. */ 3009 size = TYPE_SIZE_UNIT (type); 3010 if (!size || TREE_CODE (size) == INTEGER_CST) 3011 return; 3012 3013 /* Otherwise, make a WITH_SIZE_EXPR. */ 3014 size = unshare_expr (size); 3015 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr); 3016 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size); 3017 } 3018 3019 /* Helper for gimplify_call_expr. 
Gimplify a single argument *ARG_P 3020 Store any side-effects in PRE_P. CALL_LOCATION is the location of 3021 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be 3022 gimplified to an SSA name. */ 3023 3024 enum gimplify_status 3025 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location, 3026 bool allow_ssa) 3027 { 3028 bool (*test) (tree); 3029 fallback_t fb; 3030 3031 /* In general, we allow lvalues for function arguments to avoid 3032 extra overhead of copying large aggregates out of even larger 3033 aggregates into temporaries only to copy the temporaries to 3034 the argument list. Make optimizers happy by pulling out to 3035 temporaries those types that fit in registers. */ 3036 if (is_gimple_reg_type (TREE_TYPE (*arg_p))) 3037 test = is_gimple_val, fb = fb_rvalue; 3038 else 3039 { 3040 test = is_gimple_lvalue, fb = fb_either; 3041 /* Also strip a TARGET_EXPR that would force an extra copy. */ 3042 if (TREE_CODE (*arg_p) == TARGET_EXPR) 3043 { 3044 tree init = TARGET_EXPR_INITIAL (*arg_p); 3045 if (init 3046 && !VOID_TYPE_P (TREE_TYPE (init))) 3047 *arg_p = init; 3048 } 3049 } 3050 3051 /* If this is a variable sized type, we must remember the size. */ 3052 maybe_with_size_expr (arg_p); 3053 3054 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */ 3055 /* Make sure arguments have the same location as the function call 3056 itself. */ 3057 protected_set_expr_location (*arg_p, call_location); 3058 3059 /* There is a sequence point before a function call. Side effects in 3060 the argument list must occur before the actual call. So, when 3061 gimplifying arguments, force gimplify_expr to use an internal 3062 post queue which is then appended to the end of PRE_P. */ 3063 return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa); 3064 } 3065 3066 /* Don't fold inside offloading or taskreg regions: it can break code by 3067 adding decl references that weren't in the source. We'll do it during 3068 omplower pass instead. */ 3069 3070 static bool 3071 maybe_fold_stmt (gimple_stmt_iterator *gsi) 3072 { 3073 struct gimplify_omp_ctx *ctx; 3074 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context) 3075 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0) 3076 return false; 3077 return fold_stmt (gsi); 3078 } 3079 3080 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P. 3081 WANT_VALUE is true if the result of the call is desired. */ 3082 3083 static enum gimplify_status 3084 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value) 3085 { 3086 tree fndecl, parms, p, fnptrtype; 3087 enum gimplify_status ret; 3088 int i, nargs; 3089 gcall *call; 3090 bool builtin_va_start_p = false; 3091 location_t loc = EXPR_LOCATION (*expr_p); 3092 3093 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR); 3094 3095 /* For reliable diagnostics during inlining, it is necessary that 3096 every call_expr be annotated with file and line. */ 3097 if (! EXPR_HAS_LOCATION (*expr_p)) 3098 SET_EXPR_LOCATION (*expr_p, input_location); 3099 3100 /* Gimplify internal functions created in the FEs. 
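Such calls have no CALL_EXPR_FN; we gimplify each argument and build a GIMPLE_CALL directly from the internal function code, as done just below.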
*/ 3101 if (CALL_EXPR_FN (*expr_p) == NULL_TREE) 3102 { 3103 if (want_value) 3104 return GS_ALL_DONE; 3105 3106 nargs = call_expr_nargs (*expr_p); 3107 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p); 3108 auto_vec<tree> vargs (nargs); 3109 3110 for (i = 0; i < nargs; i++) 3111 { 3112 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, 3113 EXPR_LOCATION (*expr_p)); 3114 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i)); 3115 } 3116 gimple *call = gimple_build_call_internal_vec (ifn, vargs); 3117 gimplify_seq_add_stmt (pre_p, call); 3118 return GS_ALL_DONE; 3119 } 3120 3121 /* This may be a call to a builtin function. 3122 3123 Builtin function calls may be transformed into different 3124 (and more efficient) builtin function calls under certain 3125 circumstances. Unfortunately, gimplification can muck things 3126 up enough that the builtin expanders are not aware that certain 3127 transformations are still valid. 3128 3129 So we attempt transformation/gimplification of the call before 3130 we gimplify the CALL_EXPR. At this time we do not manage to 3131 transform all calls in the same manner as the expanders do, but 3132 we do transform most of them. */ 3133 fndecl = get_callee_fndecl (*expr_p); 3134 if (fndecl 3135 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL) 3136 switch (DECL_FUNCTION_CODE (fndecl)) 3137 { 3138 case BUILT_IN_ALLOCA: 3139 case BUILT_IN_ALLOCA_WITH_ALIGN: 3140 /* If the call has been built for a variable-sized object, then we 3141 want to restore the stack level when the enclosing BIND_EXPR is 3142 exited to reclaim the allocated space; otherwise, we precisely 3143 need to do the opposite and preserve the latest stack level. */ 3144 if (CALL_ALLOCA_FOR_VAR_P (*expr_p)) 3145 gimplify_ctxp->save_stack = true; 3146 else 3147 gimplify_ctxp->keep_stack = true; 3148 break; 3149 3150 case BUILT_IN_VA_START: 3151 { 3152 builtin_va_start_p = TRUE; 3153 if (call_expr_nargs (*expr_p) < 2) 3154 { 3155 error ("too few arguments to function %<va_start%>"); 3156 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p)); 3157 return GS_OK; 3158 } 3159 3160 if (fold_builtin_next_arg (*expr_p, true)) 3161 { 3162 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p)); 3163 return GS_OK; 3164 } 3165 break; 3166 } 3167 3168 default: 3169 ; 3170 } 3171 if (fndecl && DECL_BUILT_IN (fndecl)) 3172 { 3173 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value); 3174 if (new_tree && new_tree != *expr_p) 3175 { 3176 /* There was a transformation of this call which computes the 3177 same value, but in a more efficient way. Return and try 3178 again. */ 3179 *expr_p = new_tree; 3180 return GS_OK; 3181 } 3182 } 3183 3184 /* Remember the original function pointer type. */ 3185 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p)); 3186 3187 /* There is a sequence point before the call, so any side effects in 3188 the calling expression must occur before the actual call. Force 3189 gimplify_expr to use an internal post queue. */ 3190 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL, 3191 is_gimple_call_addr, fb_rvalue); 3192 3193 nargs = call_expr_nargs (*expr_p); 3194 3195 /* Get argument types for verification. 
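We prefer the callee's DECL_ARGUMENTS when a FUNCTION_DECL is known; failing that, we fall back to the TYPE_ARG_TYPES of the function type, taken either from the decl or from the function pointer type.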
*/ 3196 fndecl = get_callee_fndecl (*expr_p); 3197 parms = NULL_TREE; 3198 if (fndecl) 3199 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl)); 3200 else 3201 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype)); 3202 3203 if (fndecl && DECL_ARGUMENTS (fndecl)) 3204 p = DECL_ARGUMENTS (fndecl); 3205 else if (parms) 3206 p = parms; 3207 else 3208 p = NULL_TREE; 3209 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p)) 3210 ; 3211 3212 /* If the last argument is __builtin_va_arg_pack () and it is not 3213 passed as a named argument, decrease the number of CALL_EXPR 3214 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */ 3215 if (!p 3216 && i < nargs 3217 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR) 3218 { 3219 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1); 3220 tree last_arg_fndecl = get_callee_fndecl (last_arg); 3221 3222 if (last_arg_fndecl 3223 && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL 3224 && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL 3225 && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK) 3226 { 3227 tree call = *expr_p; 3228 3229 --nargs; 3230 *expr_p = build_call_array_loc (loc, TREE_TYPE (call), 3231 CALL_EXPR_FN (call), 3232 nargs, CALL_EXPR_ARGP (call)); 3233 3234 /* Copy all CALL_EXPR flags, location and block, except 3235 CALL_EXPR_VA_ARG_PACK flag. */ 3236 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call); 3237 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call); 3238 CALL_EXPR_RETURN_SLOT_OPT (*expr_p) 3239 = CALL_EXPR_RETURN_SLOT_OPT (call); 3240 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call); 3241 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call)); 3242 3243 /* Set CALL_EXPR_VA_ARG_PACK. */ 3244 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1; 3245 } 3246 } 3247 3248 /* If the call returns twice then after building the CFG the call 3249 argument computations will no longer dominate the call because 3250 we add an abnormal incoming edge to the call. So do not use SSA 3251 vars there. */ 3252 bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE; 3253 3254 /* Gimplify the function arguments. */ 3255 if (nargs > 0) 3256 { 3257 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0); 3258 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs; 3259 PUSH_ARGS_REVERSED ? i-- : i++) 3260 { 3261 enum gimplify_status t; 3262 3263 /* Avoid gimplifying the second argument to va_start, which needs to 3264 be the plain PARM_DECL. */ 3265 if ((i != 1) || !builtin_va_start_p) 3266 { 3267 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, 3268 EXPR_LOCATION (*expr_p), ! returns_twice); 3269 3270 if (t == GS_ERROR) 3271 ret = GS_ERROR; 3272 } 3273 } 3274 } 3275 3276 /* Gimplify the static chain. */ 3277 if (CALL_EXPR_STATIC_CHAIN (*expr_p)) 3278 { 3279 if (fndecl && !DECL_STATIC_CHAIN (fndecl)) 3280 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL; 3281 else 3282 { 3283 enum gimplify_status t; 3284 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p, 3285 EXPR_LOCATION (*expr_p), ! returns_twice); 3286 if (t == GS_ERROR) 3287 ret = GS_ERROR; 3288 } 3289 } 3290 3291 /* Verify the function result. */ 3292 if (want_value && fndecl 3293 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype)))) 3294 { 3295 error_at (loc, "using result of function returning %<void%>"); 3296 ret = GS_ERROR; 3297 } 3298 3299 /* Try this again in case gimplification exposed something. 
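E.g. arguments that have now been reduced to gimple values may allow fold_call_expr to simplify the call where it could not before.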
*/ 3300 if (ret != GS_ERROR) 3301 { 3302 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value); 3303 3304 if (new_tree && new_tree != *expr_p) 3305 { 3306 /* There was a transformation of this call which computes the 3307 same value, but in a more efficient way. Return and try 3308 again. */ 3309 *expr_p = new_tree; 3310 return GS_OK; 3311 } 3312 } 3313 else 3314 { 3315 *expr_p = error_mark_node; 3316 return GS_ERROR; 3317 } 3318 3319 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its 3320 decl. This allows us to eliminate redundant or useless 3321 calls to "const" functions. */ 3322 if (TREE_CODE (*expr_p) == CALL_EXPR) 3323 { 3324 int flags = call_expr_flags (*expr_p); 3325 if (flags & (ECF_CONST | ECF_PURE) 3326 /* An infinite loop is considered a side effect. */ 3327 && !(flags & (ECF_LOOPING_CONST_OR_PURE))) 3328 TREE_SIDE_EFFECTS (*expr_p) = 0; 3329 } 3330 3331 /* If the value is not needed by the caller, emit a new GIMPLE_CALL 3332 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified 3333 form and delegate the creation of a GIMPLE_CALL to 3334 gimplify_modify_expr. This is always possible because when 3335 WANT_VALUE is true, the caller wants the result of this call into 3336 a temporary, which means that we will emit an INIT_EXPR in 3337 internal_get_tmp_var which will then be handled by 3338 gimplify_modify_expr. */ 3339 if (!want_value) 3340 { 3341 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we 3342 have to do is replicate it as a GIMPLE_CALL tuple. */ 3343 gimple_stmt_iterator gsi; 3344 call = gimple_build_call_from_tree (*expr_p); 3345 gimple_call_set_fntype (call, TREE_TYPE (fnptrtype)); 3346 notice_special_calls (call); 3347 gimplify_seq_add_stmt (pre_p, call); 3348 gsi = gsi_last (*pre_p); 3349 maybe_fold_stmt (&gsi); 3350 *expr_p = NULL_TREE; 3351 } 3352 else 3353 /* Remember the original function type. */ 3354 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype, 3355 CALL_EXPR_FN (*expr_p)); 3356 3357 return ret; 3358 } 3359 3360 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by 3361 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs. 3362 3363 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the 3364 condition is true or false, respectively. If null, we should generate 3365 our own to skip over the evaluation of this specific expression. 3366 3367 LOCUS is the source location of the COND_EXPR. 3368 3369 This function is the tree equivalent of do_jump. 3370 3371 shortcut_cond_r should only be called by shortcut_cond_expr. */ 3372 3373 static tree 3374 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p, 3375 location_t locus) 3376 { 3377 tree local_label = NULL_TREE; 3378 tree t, expr = NULL; 3379 3380 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to 3381 retain the shortcut semantics. Just insert the gotos here; 3382 shortcut_cond_expr will append the real blocks later. */ 3383 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR) 3384 { 3385 location_t new_locus; 3386 3387 /* Turn if (a && b) into 3388 3389 if (a); else goto no; 3390 if (b) goto yes; else goto no; 3391 (no:) */ 3392 3393 if (false_label_p == NULL) 3394 false_label_p = &local_label; 3395 3396 /* Keep the original source location on the first 'if'. */ 3397 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus); 3398 append_to_statement_list (t, &expr); 3399 3400 /* Set the source location of the && on the second 'if'. 
*/ 3401 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus; 3402 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p, 3403 new_locus); 3404 append_to_statement_list (t, &expr); 3405 } 3406 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR) 3407 { 3408 location_t new_locus; 3409 3410 /* Turn if (a || b) into 3411 3412 if (a) goto yes; 3413 if (b) goto yes; else goto no; 3414 (yes:) */ 3415 3416 if (true_label_p == NULL) 3417 true_label_p = &local_label; 3418 3419 /* Keep the original source location on the first 'if'. */ 3420 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus); 3421 append_to_statement_list (t, &expr); 3422 3423 /* Set the source location of the || on the second 'if'. */ 3424 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus; 3425 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p, 3426 new_locus); 3427 append_to_statement_list (t, &expr); 3428 } 3429 else if (TREE_CODE (pred) == COND_EXPR 3430 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1))) 3431 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2)))) 3432 { 3433 location_t new_locus; 3434 3435 /* As long as we're messing with gotos, turn if (a ? b : c) into 3436 if (a) 3437 if (b) goto yes; else goto no; 3438 else 3439 if (c) goto yes; else goto no; 3440 3441 Don't do this if one of the arms has void type, which can happen 3442 in C++ when the arm is throw. */ 3443 3444 /* Keep the original source location on the first 'if'. Set the source 3445 location of the ? on the second 'if'. */ 3446 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus; 3447 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0), 3448 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, 3449 false_label_p, locus), 3450 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p, 3451 false_label_p, new_locus)); 3452 } 3453 else 3454 { 3455 expr = build3 (COND_EXPR, void_type_node, pred, 3456 build_and_jump (true_label_p), 3457 build_and_jump (false_label_p)); 3458 SET_EXPR_LOCATION (expr, locus); 3459 } 3460 3461 if (local_label) 3462 { 3463 t = build1 (LABEL_EXPR, void_type_node, local_label); 3464 append_to_statement_list (t, &expr); 3465 } 3466 3467 return expr; 3468 } 3469 3470 /* Given a conditional expression EXPR with short-circuit boolean 3471 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the 3472 predicate apart into the equivalent sequence of conditionals. */ 3473 3474 static tree 3475 shortcut_cond_expr (tree expr) 3476 { 3477 tree pred = TREE_OPERAND (expr, 0); 3478 tree then_ = TREE_OPERAND (expr, 1); 3479 tree else_ = TREE_OPERAND (expr, 2); 3480 tree true_label, false_label, end_label, t; 3481 tree *true_label_p; 3482 tree *false_label_p; 3483 bool emit_end, emit_false, jump_over_else; 3484 bool then_se = then_ && TREE_SIDE_EFFECTS (then_); 3485 bool else_se = else_ && TREE_SIDE_EFFECTS (else_); 3486 3487 /* First do simple transformations. */ 3488 if (!else_se) 3489 { 3490 /* If there is no 'else', turn 3491 if (a && b) then c 3492 into 3493 if (a) if (b) then c. */ 3494 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR) 3495 { 3496 /* Keep the original source location on the first 'if'. */ 3497 location_t locus = EXPR_LOC_OR_LOC (expr, input_location); 3498 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1); 3499 /* Set the source location of the && on the second 'if'. 
*/ 3500 if (EXPR_HAS_LOCATION (pred)) 3501 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred)); 3502 then_ = shortcut_cond_expr (expr); 3503 then_se = then_ && TREE_SIDE_EFFECTS (then_); 3504 pred = TREE_OPERAND (pred, 0); 3505 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE); 3506 SET_EXPR_LOCATION (expr, locus); 3507 } 3508 } 3509 3510 if (!then_se) 3511 { 3512 /* If there is no 'then', turn 3513 if (a || b); else d 3514 into 3515 if (a); else if (b); else d. */ 3516 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR) 3517 { 3518 /* Keep the original source location on the first 'if'. */ 3519 location_t locus = EXPR_LOC_OR_LOC (expr, input_location); 3520 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1); 3521 /* Set the source location of the || on the second 'if'. */ 3522 if (EXPR_HAS_LOCATION (pred)) 3523 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred)); 3524 else_ = shortcut_cond_expr (expr); 3525 else_se = else_ && TREE_SIDE_EFFECTS (else_); 3526 pred = TREE_OPERAND (pred, 0); 3527 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_); 3528 SET_EXPR_LOCATION (expr, locus); 3529 } 3530 } 3531 3532 /* If we're done, great. */ 3533 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR 3534 && TREE_CODE (pred) != TRUTH_ORIF_EXPR) 3535 return expr; 3536 3537 /* Otherwise we need to mess with gotos. Change 3538 if (a) c; else d; 3539 to 3540 if (a); else goto no; 3541 c; goto end; 3542 no: d; end: 3543 and recursively gimplify the condition. */ 3544 3545 true_label = false_label = end_label = NULL_TREE; 3546 3547 /* If our arms just jump somewhere, hijack those labels so we don't 3548 generate jumps to jumps. */ 3549 3550 if (then_ 3551 && TREE_CODE (then_) == GOTO_EXPR 3552 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL) 3553 { 3554 true_label = GOTO_DESTINATION (then_); 3555 then_ = NULL; 3556 then_se = false; 3557 } 3558 3559 if (else_ 3560 && TREE_CODE (else_) == GOTO_EXPR 3561 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL) 3562 { 3563 false_label = GOTO_DESTINATION (else_); 3564 else_ = NULL; 3565 else_se = false; 3566 } 3567 3568 /* If we aren't hijacking a label for the 'then' branch, it falls through. */ 3569 if (true_label) 3570 true_label_p = &true_label; 3571 else 3572 true_label_p = NULL; 3573 3574 /* The 'else' branch also needs a label if it contains interesting code. */ 3575 if (false_label || else_se) 3576 false_label_p = &false_label; 3577 else 3578 false_label_p = NULL; 3579 3580 /* If there was nothing else in our arms, just forward the label(s). */ 3581 if (!then_se && !else_se) 3582 return shortcut_cond_r (pred, true_label_p, false_label_p, 3583 EXPR_LOC_OR_LOC (expr, input_location)); 3584 3585 /* If our last subexpression already has a terminal label, reuse it. */ 3586 if (else_se) 3587 t = expr_last (else_); 3588 else if (then_se) 3589 t = expr_last (then_); 3590 else 3591 t = NULL; 3592 if (t && TREE_CODE (t) == LABEL_EXPR) 3593 end_label = LABEL_EXPR_LABEL (t); 3594 3595 /* If we don't care about jumping to the 'else' branch, jump to the end 3596 if the condition is false. */ 3597 if (!false_label_p) 3598 false_label_p = &end_label; 3599 3600 /* We only want to emit these labels if we aren't hijacking them. */ 3601 emit_end = (end_label == NULL_TREE); 3602 emit_false = (false_label == NULL_TREE); 3603 3604 /* We only emit the jump over the else clause if we have to--if the 3605 then clause may fall through. 
Otherwise we can wind up with a 3606 useless jump and a useless label at the end of gimplified code, 3607 which will cause us to think that this conditional as a whole 3608 falls through even if it doesn't. If we then inline a function 3609 which ends with such a condition, that can cause us to issue an 3610 inappropriate warning about control reaching the end of a 3611 non-void function. */ 3612 jump_over_else = block_may_fallthru (then_); 3613 3614 pred = shortcut_cond_r (pred, true_label_p, false_label_p, 3615 EXPR_LOC_OR_LOC (expr, input_location)); 3616 3617 expr = NULL; 3618 append_to_statement_list (pred, &expr); 3619 3620 append_to_statement_list (then_, &expr); 3621 if (else_se) 3622 { 3623 if (jump_over_else) 3624 { 3625 tree last = expr_last (expr); 3626 t = build_and_jump (&end_label); 3627 if (EXPR_HAS_LOCATION (last)) 3628 SET_EXPR_LOCATION (t, EXPR_LOCATION (last)); 3629 append_to_statement_list (t, &expr); 3630 } 3631 if (emit_false) 3632 { 3633 t = build1 (LABEL_EXPR, void_type_node, false_label); 3634 append_to_statement_list (t, &expr); 3635 } 3636 append_to_statement_list (else_, &expr); 3637 } 3638 if (emit_end && end_label) 3639 { 3640 t = build1 (LABEL_EXPR, void_type_node, end_label); 3641 append_to_statement_list (t, &expr); 3642 } 3643 3644 return expr; 3645 } 3646 3647 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */ 3648 3649 tree 3650 gimple_boolify (tree expr) 3651 { 3652 tree type = TREE_TYPE (expr); 3653 location_t loc = EXPR_LOCATION (expr); 3654 3655 if (TREE_CODE (expr) == NE_EXPR 3656 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR 3657 && integer_zerop (TREE_OPERAND (expr, 1))) 3658 { 3659 tree call = TREE_OPERAND (expr, 0); 3660 tree fn = get_callee_fndecl (call); 3661 3662 /* For __builtin_expect ((long) (x), y) recurse into x as well 3663 if x is truth_value_p. */ 3664 if (fn 3665 && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL 3666 && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT 3667 && call_expr_nargs (call) == 2) 3668 { 3669 tree arg = CALL_EXPR_ARG (call, 0); 3670 if (arg) 3671 { 3672 if (TREE_CODE (arg) == NOP_EXPR 3673 && TREE_TYPE (arg) == TREE_TYPE (call)) 3674 arg = TREE_OPERAND (arg, 0); 3675 if (truth_value_p (TREE_CODE (arg))) 3676 { 3677 arg = gimple_boolify (arg); 3678 CALL_EXPR_ARG (call, 0) 3679 = fold_convert_loc (loc, TREE_TYPE (call), arg); 3680 } 3681 } 3682 } 3683 } 3684 3685 switch (TREE_CODE (expr)) 3686 { 3687 case TRUTH_AND_EXPR: 3688 case TRUTH_OR_EXPR: 3689 case TRUTH_XOR_EXPR: 3690 case TRUTH_ANDIF_EXPR: 3691 case TRUTH_ORIF_EXPR: 3692 /* Also boolify the arguments of truth exprs. */ 3693 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1)); 3694 /* FALLTHRU */ 3695 3696 case TRUTH_NOT_EXPR: 3697 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0)); 3698 3699 /* These expressions always produce boolean results. */ 3700 if (TREE_CODE (type) != BOOLEAN_TYPE) 3701 TREE_TYPE (expr) = boolean_type_node; 3702 return expr; 3703 3704 case ANNOTATE_EXPR: 3705 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1))) 3706 { 3707 case annot_expr_ivdep_kind: 3708 case annot_expr_no_vector_kind: 3709 case annot_expr_vector_kind: 3710 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0)); 3711 if (TREE_CODE (type) != BOOLEAN_TYPE) 3712 TREE_TYPE (expr) = boolean_type_node; 3713 return expr; 3714 default: 3715 gcc_unreachable (); 3716 } 3717 3718 default: 3719 if (COMPARISON_CLASS_P (expr)) 3720 { 3721 /* These expressions always produce boolean results. 
*/ 3722 if (TREE_CODE (type) != BOOLEAN_TYPE) 3723 TREE_TYPE (expr) = boolean_type_node; 3724 return expr; 3725 } 3726 /* Other expressions that get here must have boolean values, but 3727 might need to be converted to the appropriate mode. */ 3728 if (TREE_CODE (type) == BOOLEAN_TYPE) 3729 return expr; 3730 return fold_convert_loc (loc, boolean_type_node, expr); 3731 } 3732 } 3733 3734 /* Given a conditional expression *EXPR_P without side effects, gimplify 3735 its operands. New statements are inserted to PRE_P. */ 3736 3737 static enum gimplify_status 3738 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p) 3739 { 3740 tree expr = *expr_p, cond; 3741 enum gimplify_status ret, tret; 3742 enum tree_code code; 3743 3744 cond = gimple_boolify (COND_EXPR_COND (expr)); 3745 3746 /* We need to handle && and || specially, as their gimplification 3747 creates pure cond_expr, thus leading to an infinite cycle otherwise. */ 3748 code = TREE_CODE (cond); 3749 if (code == TRUTH_ANDIF_EXPR) 3750 TREE_SET_CODE (cond, TRUTH_AND_EXPR); 3751 else if (code == TRUTH_ORIF_EXPR) 3752 TREE_SET_CODE (cond, TRUTH_OR_EXPR); 3753 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue); 3754 COND_EXPR_COND (*expr_p) = cond; 3755 3756 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL, 3757 is_gimple_val, fb_rvalue); 3758 ret = MIN (ret, tret); 3759 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL, 3760 is_gimple_val, fb_rvalue); 3761 3762 return MIN (ret, tret); 3763 } 3764 3765 /* Return true if evaluating EXPR could trap. 3766 EXPR is GENERIC, while tree_could_trap_p can be called 3767 only on GIMPLE. */ 3768 3769 static bool 3770 generic_expr_could_trap_p (tree expr) 3771 { 3772 unsigned i, n; 3773 3774 if (!expr || is_gimple_val (expr)) 3775 return false; 3776 3777 if (!EXPR_P (expr) || tree_could_trap_p (expr)) 3778 return true; 3779 3780 n = TREE_OPERAND_LENGTH (expr); 3781 for (i = 0; i < n; i++) 3782 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i))) 3783 return true; 3784 3785 return false; 3786 } 3787 3788 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;' 3789 into 3790 3791 if (p) if (p) 3792 t1 = a; a; 3793 else or else 3794 t1 = b; b; 3795 t1; 3796 3797 The second form is used when *EXPR_P is of type void. 3798 3799 PRE_P points to the list where side effects that must happen before 3800 *EXPR_P should be stored. */ 3801 3802 static enum gimplify_status 3803 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback) 3804 { 3805 tree expr = *expr_p; 3806 tree type = TREE_TYPE (expr); 3807 location_t loc = EXPR_LOCATION (expr); 3808 tree tmp, arm1, arm2; 3809 enum gimplify_status ret; 3810 tree label_true, label_false, label_cont; 3811 bool have_then_clause_p, have_else_clause_p; 3812 gcond *cond_stmt; 3813 enum tree_code pred_code; 3814 gimple_seq seq = NULL; 3815 3816 /* If this COND_EXPR has a value, copy the values into a temporary within 3817 the arms. */ 3818 if (!VOID_TYPE_P (type)) 3819 { 3820 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2); 3821 tree result; 3822 3823 /* If either an rvalue is ok or we do not require an lvalue, create the 3824 temporary. But we cannot do that if the type is addressable. */ 3825 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue)) 3826 && !TREE_ADDRESSABLE (type)) 3827 { 3828 if (gimplify_ctxp->allow_rhs_cond_expr 3829 /* If either branch has side effects or could trap, it can't be 3830 evaluated unconditionally. 
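For example, in 'x = p ? a : b / c' the division could trap
and must stay guarded by the branch; only arms like the plain
'a' and 'b' of 'x = p ? a : b' qualify for this shortcut
(an illustrative sketch).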
*/ 3831 && !TREE_SIDE_EFFECTS (then_) 3832 && !generic_expr_could_trap_p (then_) 3833 && !TREE_SIDE_EFFECTS (else_) 3834 && !generic_expr_could_trap_p (else_)) 3835 return gimplify_pure_cond_expr (expr_p, pre_p); 3836 3837 tmp = create_tmp_var (type, "iftmp"); 3838 result = tmp; 3839 } 3840 3841 /* Otherwise, only create and copy references to the values. */ 3842 else 3843 { 3844 type = build_pointer_type (type); 3845 3846 if (!VOID_TYPE_P (TREE_TYPE (then_))) 3847 then_ = build_fold_addr_expr_loc (loc, then_); 3848 3849 if (!VOID_TYPE_P (TREE_TYPE (else_))) 3850 else_ = build_fold_addr_expr_loc (loc, else_); 3851 3852 expr 3853 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_); 3854 3855 tmp = create_tmp_var (type, "iftmp"); 3856 result = build_simple_mem_ref_loc (loc, tmp); 3857 } 3858 3859 /* Build the new then clause, `tmp = then_;'. But don't build the 3860 assignment if the value is void; in C++ it can be if it's a throw. */ 3861 if (!VOID_TYPE_P (TREE_TYPE (then_))) 3862 TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_); 3863 3864 /* Similarly, build the new else clause, `tmp = else_;'. */ 3865 if (!VOID_TYPE_P (TREE_TYPE (else_))) 3866 TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_); 3867 3868 TREE_TYPE (expr) = void_type_node; 3869 recalculate_side_effects (expr); 3870 3871 /* Move the COND_EXPR to the prequeue. */ 3872 gimplify_stmt (&expr, pre_p); 3873 3874 *expr_p = result; 3875 return GS_ALL_DONE; 3876 } 3877 3878 /* Remove any COMPOUND_EXPR so the following cases will be caught. */ 3879 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0)); 3880 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR) 3881 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true); 3882 3883 /* Make sure the condition has BOOLEAN_TYPE. */ 3884 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0)); 3885 3886 /* Break apart && and || conditions. */ 3887 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR 3888 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR) 3889 { 3890 expr = shortcut_cond_expr (expr); 3891 3892 if (expr != *expr_p) 3893 { 3894 *expr_p = expr; 3895 3896 /* We can't rely on gimplify_expr to re-gimplify the expanded 3897 form properly, as cleanups might cause the target labels to be 3898 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to 3899 set up a conditional context. */ 3900 gimple_push_condition (); 3901 gimplify_stmt (expr_p, &seq); 3902 gimple_pop_condition (pre_p); 3903 gimple_seq_add_seq (pre_p, seq); 3904 3905 return GS_ALL_DONE; 3906 } 3907 } 3908 3909 /* Now do the normal gimplification. */ 3910 3911 /* Gimplify condition. */ 3912 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr, 3913 fb_rvalue); 3914 if (ret == GS_ERROR) 3915 return GS_ERROR; 3916 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE); 3917 3918 gimple_push_condition (); 3919 3920 have_then_clause_p = have_else_clause_p = false; 3921 if (TREE_OPERAND (expr, 1) != NULL 3922 && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR 3923 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL 3924 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) 3925 == current_function_decl) 3926 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR 3927 have different locations, otherwise we end up with incorrect 3928 location information on the branches. 
*/ 3929 && (optimize 3930 || !EXPR_HAS_LOCATION (expr) 3931 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1)) 3932 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1)))) 3933 { 3934 label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1)); 3935 have_then_clause_p = true; 3936 } 3937 else 3938 label_true = create_artificial_label (UNKNOWN_LOCATION); 3939 if (TREE_OPERAND (expr, 2) != NULL 3940 && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR 3941 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL 3942 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) 3943 == current_function_decl) 3944 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR 3945 have different locations, otherwise we end up with incorrect 3946 location information on the branches. */ 3947 && (optimize 3948 || !EXPR_HAS_LOCATION (expr) 3949 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2)) 3950 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2)))) 3951 { 3952 label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2)); 3953 have_else_clause_p = true; 3954 } 3955 else 3956 label_false = create_artificial_label (UNKNOWN_LOCATION); 3957 3958 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1, 3959 &arm2); 3960 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, 3961 label_false); 3962 gimple_set_no_warning (cond_stmt, TREE_NO_WARNING (COND_EXPR_COND (expr))); 3963 gimplify_seq_add_stmt (&seq, cond_stmt); 3964 gimple_stmt_iterator gsi = gsi_last (seq); 3965 maybe_fold_stmt (&gsi); 3966 3967 label_cont = NULL_TREE; 3968 if (!have_then_clause_p) 3969 { 3970 /* For if (...) {} else { code; } put label_true after 3971 the else block. */ 3972 if (TREE_OPERAND (expr, 1) == NULL_TREE 3973 && !have_else_clause_p 3974 && TREE_OPERAND (expr, 2) != NULL_TREE) 3975 label_cont = label_true; 3976 else 3977 { 3978 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true)); 3979 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq); 3980 /* For if (...) { code; } else {} or 3981 if (...) { code; } else goto label; or 3982 if (...) { code; return; } else { ... } 3983 label_cont isn't needed. */ 3984 if (!have_else_clause_p 3985 && TREE_OPERAND (expr, 2) != NULL_TREE 3986 && gimple_seq_may_fallthru (seq)) 3987 { 3988 gimple *g; 3989 label_cont = create_artificial_label (UNKNOWN_LOCATION); 3990 3991 g = gimple_build_goto (label_cont); 3992 3993 /* GIMPLE_COND's are very low level; they have embedded 3994 gotos. This particular embedded goto should not be marked 3995 with the location of the original COND_EXPR, as it would 3996 correspond to the COND_EXPR's condition, not the ELSE or the 3997 THEN arms. To avoid marking it with the wrong location, flag 3998 it as "no location". */ 3999 gimple_set_do_not_emit_location (g); 4000 4001 gimplify_seq_add_stmt (&seq, g); 4002 } 4003 } 4004 } 4005 if (!have_else_clause_p) 4006 { 4007 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false)); 4008 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq); 4009 } 4010 if (label_cont) 4011 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont)); 4012 4013 gimple_pop_condition (pre_p); 4014 gimple_seq_add_seq (pre_p, seq); 4015 4016 if (ret == GS_ERROR) 4017 ; /* Do nothing. */ 4018 else if (have_then_clause_p || have_else_clause_p) 4019 ret = GS_ALL_DONE; 4020 else 4021 { 4022 /* Both arms are empty; replace the COND_EXPR with its predicate. 
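The predicate may still have side effects that must be evaluated,
e.g. the call in 'if (f ()) ; else ;' (an illustrative sketch).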
*/ 4023 expr = TREE_OPERAND (expr, 0); 4024 gimplify_stmt (&expr, pre_p); 4025 } 4026 4027 *expr_p = NULL; 4028 return ret; 4029 } 4030 4031 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression, 4032 to be marked addressable. 4033 4034 We cannot rely on such an expression being directly markable if a temporary 4035 has been created by the gimplification. In this case, we create another 4036 temporary and initialize it with a copy, which will become a store after we 4037 mark it addressable. This can happen if the front-end passed us something 4038 that it could not mark addressable yet, like a Fortran pass-by-reference 4039 parameter (int) floatvar. */ 4040 4041 static void 4042 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p) 4043 { 4044 while (handled_component_p (*expr_p)) 4045 expr_p = &TREE_OPERAND (*expr_p, 0); 4046 if (is_gimple_reg (*expr_p)) 4047 { 4048 /* Do not allow an SSA name as the temporary. */ 4049 tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL, false); 4050 DECL_GIMPLE_REG_P (var) = 0; 4051 *expr_p = var; 4052 } 4053 } 4054 4055 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with 4056 a call to __builtin_memcpy. */ 4057 4058 static enum gimplify_status 4059 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value, 4060 gimple_seq *seq_p) 4061 { 4062 tree t, to, to_ptr, from, from_ptr; 4063 gcall *gs; 4064 location_t loc = EXPR_LOCATION (*expr_p); 4065 4066 to = TREE_OPERAND (*expr_p, 0); 4067 from = TREE_OPERAND (*expr_p, 1); 4068 4069 /* Mark the RHS addressable. Beware that it may not be possible to do so 4070 directly if a temporary has been created by the gimplification. */ 4071 prepare_gimple_addressable (&from, seq_p); 4072 4073 mark_addressable (from); 4074 from_ptr = build_fold_addr_expr_loc (loc, from); 4075 gimplify_arg (&from_ptr, seq_p, loc); 4076 4077 mark_addressable (to); 4078 to_ptr = build_fold_addr_expr_loc (loc, to); 4079 gimplify_arg (&to_ptr, seq_p, loc); 4080 4081 t = builtin_decl_implicit (BUILT_IN_MEMCPY); 4082 4083 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size); 4084 4085 if (want_value) 4086 { 4087 /* tmp = memcpy() */ 4088 t = create_tmp_var (TREE_TYPE (to_ptr)); 4089 gimple_call_set_lhs (gs, t); 4090 gimplify_seq_add_stmt (seq_p, gs); 4091 4092 *expr_p = build_simple_mem_ref (t); 4093 return GS_ALL_DONE; 4094 } 4095 4096 gimplify_seq_add_stmt (seq_p, gs); 4097 *expr_p = NULL; 4098 return GS_ALL_DONE; 4099 } 4100 4101 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with 4102 a call to __builtin_memset. In this case we know that the RHS is 4103 a CONSTRUCTOR with an empty element list. */ 4104 4105 static enum gimplify_status 4106 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value, 4107 gimple_seq *seq_p) 4108 { 4109 tree t, from, to, to_ptr; 4110 gcall *gs; 4111 location_t loc = EXPR_LOCATION (*expr_p); 4112 4113 /* Assert our assumptions, to abort instead of producing wrong code 4114 silently if they are not met. Beware that the RHS CONSTRUCTOR might 4115 not be immediately exposed. */ 4116 from = TREE_OPERAND (*expr_p, 1); 4117 if (TREE_CODE (from) == WITH_SIZE_EXPR) 4118 from = TREE_OPERAND (from, 0); 4119 4120 gcc_assert (TREE_CODE (from) == CONSTRUCTOR 4121 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from))); 4122 4123 /* Now proceed. 
*/ 4124 to = TREE_OPERAND (*expr_p, 0); 4125 4126 to_ptr = build_fold_addr_expr_loc (loc, to); 4127 gimplify_arg (&to_ptr, seq_p, loc); 4128 t = builtin_decl_implicit (BUILT_IN_MEMSET); 4129 4130 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size); 4131 4132 if (want_value) 4133 { 4134 /* tmp = memset() */ 4135 t = create_tmp_var (TREE_TYPE (to_ptr)); 4136 gimple_call_set_lhs (gs, t); 4137 gimplify_seq_add_stmt (seq_p, gs); 4138 4139 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t); 4140 return GS_ALL_DONE; 4141 } 4142 4143 gimplify_seq_add_stmt (seq_p, gs); 4144 *expr_p = NULL; 4145 return GS_ALL_DONE; 4146 } 4147 4148 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree, 4149 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an 4150 assignment. Return non-null if we detect a potential overlap. */ 4151 4152 struct gimplify_init_ctor_preeval_data 4153 { 4154 /* The base decl of the lhs object. May be NULL, in which case we 4155 have to assume the lhs is indirect. */ 4156 tree lhs_base_decl; 4157 4158 /* The alias set of the lhs object. */ 4159 alias_set_type lhs_alias_set; 4160 }; 4161 4162 static tree 4163 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata) 4164 { 4165 struct gimplify_init_ctor_preeval_data *data 4166 = (struct gimplify_init_ctor_preeval_data *) xdata; 4167 tree t = *tp; 4168 4169 /* If we find the base object, obviously we have overlap. */ 4170 if (data->lhs_base_decl == t) 4171 return t; 4172 4173 /* If the constructor component is indirect, determine if we have a 4174 potential overlap with the lhs. The only bits of information we 4175 have to go on at this point are addressability and alias sets. */ 4176 if ((INDIRECT_REF_P (t) 4177 || TREE_CODE (t) == MEM_REF) 4178 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl)) 4179 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t))) 4180 return t; 4181 4182 /* If the constructor component is a call, determine if it can hide a 4183 potential overlap with the lhs through an INDIRECT_REF like above. 4184 ??? Ugh - this is completely broken. In fact this whole analysis 4185 doesn't look conservative. */ 4186 if (TREE_CODE (t) == CALL_EXPR) 4187 { 4188 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t))); 4189 4190 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type)) 4191 if (POINTER_TYPE_P (TREE_VALUE (type)) 4192 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl)) 4193 && alias_sets_conflict_p (data->lhs_alias_set, 4194 get_alias_set 4195 (TREE_TYPE (TREE_VALUE (type))))) 4196 return t; 4197 } 4198 4199 if (IS_TYPE_OR_DECL_P (t)) 4200 *walk_subtrees = 0; 4201 return NULL; 4202 } 4203 4204 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR, 4205 force values that overlap with the lhs (as described by *DATA) 4206 into temporaries. */ 4207 4208 static void 4209 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p, 4210 struct gimplify_init_ctor_preeval_data *data) 4211 { 4212 enum gimplify_status one; 4213 4214 /* If the value is constant, then there's nothing to pre-evaluate. */ 4215 if (TREE_CONSTANT (*expr_p)) 4216 { 4217 /* Ensure it does not have side effects, it might contain a reference to 4218 the object we're initializing. */ 4219 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p)); 4220 return; 4221 } 4222 4223 /* If the type has non-trivial constructors, we can't pre-evaluate. 
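TREE_ADDRESSABLE on the type is the mark we rely on for that; the
C++ front end sets it e.g. for classes with a non-trivial copy
constructor or destructor, which must not be copied into
temporaries behind the front end's back.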
*/ 4224 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p))) 4225 return; 4226 4227 /* Recurse for nested constructors. */ 4228 if (TREE_CODE (*expr_p) == CONSTRUCTOR) 4229 { 4230 unsigned HOST_WIDE_INT ix; 4231 constructor_elt *ce; 4232 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p); 4233 4234 FOR_EACH_VEC_SAFE_ELT (v, ix, ce) 4235 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data); 4236 4237 return; 4238 } 4239 4240 /* If this is a variable sized type, we must remember the size. */ 4241 maybe_with_size_expr (expr_p); 4242 4243 /* Gimplify the constructor element to something appropriate for the rhs 4244 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know 4245 the gimplifier will consider this a store to memory. Doing this 4246 gimplification now means that we won't have to deal with complicated 4247 language-specific trees, nor trees like SAVE_EXPR that can induce 4248 exponential search behavior. */ 4249 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue); 4250 if (one == GS_ERROR) 4251 { 4252 *expr_p = NULL; 4253 return; 4254 } 4255 4256 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap 4257 with the lhs, since "a = { .x=a }" doesn't make sense. This will 4258 always be true for all scalars, since is_gimple_mem_rhs insists on a 4259 temporary variable for them. */ 4260 if (DECL_P (*expr_p)) 4261 return; 4262 4263 /* If this is of variable size, we have no choice but to assume it doesn't 4264 overlap since we can't make a temporary for it. */ 4265 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST) 4266 return; 4267 4268 /* Otherwise, we must search for overlap ... */ 4269 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL)) 4270 return; 4271 4272 /* ... and if found, force the value into a temporary. */ 4273 *expr_p = get_formal_tmp_var (*expr_p, pre_p); 4274 } 4275 4276 /* A subroutine of gimplify_init_ctor_eval. Create a loop for 4277 a RANGE_EXPR in a CONSTRUCTOR for an array. 4278 4279 var = lower; 4280 loop_entry: 4281 object[var] = value; 4282 if (var == upper) 4283 goto loop_exit; 4284 var = var + 1; 4285 goto loop_entry; 4286 loop_exit: 4287 4288 We increment var _after_ the loop exit check because we might otherwise 4289 fail if upper == TYPE_MAX_VALUE (type for upper). 4290 4291 Note that we never have to deal with SAVE_EXPRs here, because this has 4292 already been taken care of for us, in gimplify_init_ctor_preeval(). */ 4293 4294 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *, 4295 gimple_seq *, bool); 4296 4297 static void 4298 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper, 4299 tree value, tree array_elt_type, 4300 gimple_seq *pre_p, bool cleared) 4301 { 4302 tree loop_entry_label, loop_exit_label, fall_thru_label; 4303 tree var, var_type, cref, tmp; 4304 4305 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION); 4306 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION); 4307 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION); 4308 4309 /* Create and initialize the index variable. */ 4310 var_type = TREE_TYPE (upper); 4311 var = create_tmp_var (var_type); 4312 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower)); 4313 4314 /* Add the loop entry label. */ 4315 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label)); 4316 4317 /* Build the reference. 
*/ 4318 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object), 4319 var, NULL_TREE, NULL_TREE); 4320 4321 /* If we are a constructor, just call gimplify_init_ctor_eval to do 4322 the store. Otherwise just assign value to the reference. */ 4323 4324 if (TREE_CODE (value) == CONSTRUCTOR) 4325 /* NB we might have to call ourself recursively through 4326 gimplify_init_ctor_eval if the value is a constructor. */ 4327 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value), 4328 pre_p, cleared); 4329 else 4330 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value)); 4331 4332 /* We exit the loop when the index var is equal to the upper bound. */ 4333 gimplify_seq_add_stmt (pre_p, 4334 gimple_build_cond (EQ_EXPR, var, upper, 4335 loop_exit_label, fall_thru_label)); 4336 4337 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label)); 4338 4339 /* Otherwise, increment the index var... */ 4340 tmp = build2 (PLUS_EXPR, var_type, var, 4341 fold_convert (var_type, integer_one_node)); 4342 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp)); 4343 4344 /* ...and jump back to the loop entry. */ 4345 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label)); 4346 4347 /* Add the loop exit label. */ 4348 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label)); 4349 } 4350 4351 /* Return true if FDECL is accessing a field that is zero sized. */ 4352 4353 static bool 4354 zero_sized_field_decl (const_tree fdecl) 4355 { 4356 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl) 4357 && integer_zerop (DECL_SIZE (fdecl))) 4358 return true; 4359 return false; 4360 } 4361 4362 /* Return true if TYPE is zero sized. */ 4363 4364 static bool 4365 zero_sized_type (const_tree type) 4366 { 4367 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type) 4368 && integer_zerop (TYPE_SIZE (type))) 4369 return true; 4370 return false; 4371 } 4372 4373 /* A subroutine of gimplify_init_constructor. Generate individual 4374 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the 4375 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the 4376 CONSTRUCTOR. CLEARED is true if the entire LHS object has been 4377 zeroed first. */ 4378 4379 static void 4380 gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts, 4381 gimple_seq *pre_p, bool cleared) 4382 { 4383 tree array_elt_type = NULL; 4384 unsigned HOST_WIDE_INT ix; 4385 tree purpose, value; 4386 4387 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE) 4388 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object))); 4389 4390 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value) 4391 { 4392 tree cref; 4393 4394 /* NULL values are created above for gimplification errors. */ 4395 if (value == NULL) 4396 continue; 4397 4398 if (cleared && initializer_zerop (value)) 4399 continue; 4400 4401 /* ??? Here's to hoping the front end fills in all of the indices, 4402 so we don't have to figure out what's missing ourselves. */ 4403 gcc_assert (purpose); 4404 4405 /* Skip zero-sized fields, unless value has side-effects. This can 4406 happen with calls to functions returning a zero-sized type, which 4407 we shouldn't discard. As a number of downstream passes don't 4408 expect sets of zero-sized fields, we rely on the gimplification of 4409 the MODIFY_EXPR we make below to drop the assignment statement. */ 4410 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose)) 4411 continue; 4412 4413 /* If we have a RANGE_EXPR, we have to build a loop to assign the 4414 whole range. 
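Such RANGE_EXPRs come from GNU C designated range initializers,
e.g.

   int a[100] = { [3 ... 42] = 1 };

for which LOWER is 3 and UPPER is 42 (an illustrative example).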
*/
4415 if (TREE_CODE (purpose) == RANGE_EXPR)
4416 {
4417 tree lower = TREE_OPERAND (purpose, 0);
4418 tree upper = TREE_OPERAND (purpose, 1);
4419
4420 /* If the lower bound is equal to upper, just treat it as if
4421 upper were the index. */
4422 if (simple_cst_equal (lower, upper))
4423 purpose = upper;
4424 else
4425 {
4426 gimplify_init_ctor_eval_range (object, lower, upper, value,
4427 array_elt_type, pre_p, cleared);
4428 continue;
4429 }
4430 }
4431
4432 if (array_elt_type)
4433 {
4434 /* Do not use bitsizetype for ARRAY_REF indices. */
4435 if (TYPE_DOMAIN (TREE_TYPE (object)))
4436 purpose
4437 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
4438 purpose);
4439 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4440 purpose, NULL_TREE, NULL_TREE);
4441 }
4442 else
4443 {
4444 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
4445 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
4446 unshare_expr (object), purpose, NULL_TREE);
4447 }
4448
4449 if (TREE_CODE (value) == CONSTRUCTOR
4450 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
4451 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4452 pre_p, cleared);
4453 else
4454 {
4455 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
4456 gimplify_and_add (init, pre_p);
4457 ggc_free (init);
4458 }
4459 }
4460 }
4461
4462 /* Return the appropriate RHS predicate for this LHS. */
4463
4464 gimple_predicate
4465 rhs_predicate_for (tree lhs)
4466 {
4467 if (is_gimple_reg (lhs))
4468 return is_gimple_reg_rhs_or_call;
4469 else
4470 return is_gimple_mem_rhs_or_call;
4471 }
4472
4473 /* Return the initial guess for an appropriate RHS predicate for this LHS,
4474 before the LHS has been gimplified. */
4475
4476 static gimple_predicate
4477 initial_rhs_predicate_for (tree lhs)
4478 {
4479 if (is_gimple_reg_type (TREE_TYPE (lhs)))
4480 return is_gimple_reg_rhs_or_call;
4481 else
4482 return is_gimple_mem_rhs_or_call;
4483 }
4484
4485 /* Gimplify a C99 compound literal expression. This just means adding
4486 the DECL_EXPR before the current statement and using its anonymous
4487 decl instead. */
4488
4489 static enum gimplify_status
4490 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
4491 bool (*gimple_test_f) (tree),
4492 fallback_t fallback)
4493 {
4494 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
4495 tree decl = DECL_EXPR_DECL (decl_s);
4496 tree init = DECL_INITIAL (decl);
4497 /* Mark the decl as addressable if the compound literal
4498 expression is addressable now; otherwise it would be marked too late,
4499 after we gimplify the initialization expression. */
4500 if (TREE_ADDRESSABLE (*expr_p))
4501 TREE_ADDRESSABLE (decl) = 1;
4502 /* Otherwise, if we don't need an lvalue and have a literal, directly
4503 substitute it. Check that it matches the gimple predicate, as
4504 otherwise we'd generate a new temporary, and we may as well just
4505 use the decl we already have. */
4506 else if (!TREE_ADDRESSABLE (decl)
4507 && !TREE_THIS_VOLATILE (decl)
4508 && init
4509 && (fallback & fb_lvalue) == 0
4510 && gimple_test_f (init))
4511 {
4512 *expr_p = init;
4513 return GS_OK;
4514 }
4515
4516 /* Preliminarily mark non-addressed complex variables as eligible
4517 for promotion to gimple registers. We'll transform their uses
4518 as we find them.
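For instance, a '_Complex double' compound literal that is only
ever read as an rvalue can then live in a register pair rather
than in memory (a preliminary guess, per the above).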
*/ 4519 if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE 4520 || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE) 4521 && !TREE_THIS_VOLATILE (decl) 4522 && !needs_to_live_in_memory (decl)) 4523 DECL_GIMPLE_REG_P (decl) = 1; 4524 4525 /* If the decl is not addressable, then it is being used in some 4526 expression or on the right hand side of a statement, and it can 4527 be put into a readonly data section. */ 4528 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0) 4529 TREE_READONLY (decl) = 1; 4530 4531 /* This decl isn't mentioned in the enclosing block, so add it to the 4532 list of temps. FIXME it seems a bit of a kludge to say that 4533 anonymous artificial vars aren't pushed, but everything else is. */ 4534 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl)) 4535 gimple_add_tmp_var (decl); 4536 4537 gimplify_and_add (decl_s, pre_p); 4538 *expr_p = decl; 4539 return GS_OK; 4540 } 4541 4542 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR, 4543 return a new CONSTRUCTOR if something changed. */ 4544 4545 static tree 4546 optimize_compound_literals_in_ctor (tree orig_ctor) 4547 { 4548 tree ctor = orig_ctor; 4549 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor); 4550 unsigned int idx, num = vec_safe_length (elts); 4551 4552 for (idx = 0; idx < num; idx++) 4553 { 4554 tree value = (*elts)[idx].value; 4555 tree newval = value; 4556 if (TREE_CODE (value) == CONSTRUCTOR) 4557 newval = optimize_compound_literals_in_ctor (value); 4558 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR) 4559 { 4560 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value); 4561 tree decl = DECL_EXPR_DECL (decl_s); 4562 tree init = DECL_INITIAL (decl); 4563 4564 if (!TREE_ADDRESSABLE (value) 4565 && !TREE_ADDRESSABLE (decl) 4566 && init 4567 && TREE_CODE (init) == CONSTRUCTOR) 4568 newval = optimize_compound_literals_in_ctor (init); 4569 } 4570 if (newval == value) 4571 continue; 4572 4573 if (ctor == orig_ctor) 4574 { 4575 ctor = copy_node (orig_ctor); 4576 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts); 4577 elts = CONSTRUCTOR_ELTS (ctor); 4578 } 4579 (*elts)[idx].value = newval; 4580 } 4581 return ctor; 4582 } 4583 4584 /* A subroutine of gimplify_modify_expr. Break out elements of a 4585 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs. 4586 4587 Note that we still need to clear any elements that don't have explicit 4588 initializers, so if not all elements are initialized we keep the 4589 original MODIFY_EXPR, we just remove all of the constructor elements. 4590 4591 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return 4592 GS_ERROR if we would have to create a temporary when gimplifying 4593 this constructor. Otherwise, return GS_OK. 4594 4595 If NOTIFY_TEMP_CREATION is false, just do the gimplification. 
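As a sketch of the common case,

   struct S { int a, b, c; } s = { 1, f (), 3 };

is emitted, modulo the clearing decisions made below, as the
equivalent of 's.a = 1; s.b = f (); s.c = 3;' ('f' standing in
for any element that is not a constant).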
*/
4596
4597 static enum gimplify_status
4598 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4599 bool want_value, bool notify_temp_creation)
4600 {
4601 tree object, ctor, type;
4602 enum gimplify_status ret;
4603 vec<constructor_elt, va_gc> *elts;
4604
4605 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
4606
4607 if (!notify_temp_creation)
4608 {
4609 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4610 is_gimple_lvalue, fb_lvalue);
4611 if (ret == GS_ERROR)
4612 return ret;
4613 }
4614
4615 object = TREE_OPERAND (*expr_p, 0);
4616 ctor = TREE_OPERAND (*expr_p, 1)
4617 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
4618 type = TREE_TYPE (ctor);
4619 elts = CONSTRUCTOR_ELTS (ctor);
4620 ret = GS_ALL_DONE;
4621
4622 switch (TREE_CODE (type))
4623 {
4624 case RECORD_TYPE:
4625 case UNION_TYPE:
4626 case QUAL_UNION_TYPE:
4627 case ARRAY_TYPE:
4628 {
4629 struct gimplify_init_ctor_preeval_data preeval_data;
4630 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
4631 bool cleared, complete_p, valid_const_initializer;
4632
4633 /* Aggregate types must lower constructors to initialization of
4634 individual elements. The exception is that a CONSTRUCTOR node
4635 with no elements indicates zero-initialization of the whole. */
4636 if (vec_safe_is_empty (elts))
4637 {
4638 if (notify_temp_creation)
4639 return GS_OK;
4640 break;
4641 }
4642
4643 /* Fetch information about the constructor to direct later processing.
4644 We might want to make static versions of it in various cases, and
4645 can only do so if it is known to be a valid constant initializer. */
4646 valid_const_initializer
4647 = categorize_ctor_elements (ctor, &num_nonzero_elements,
4648 &num_ctor_elements, &complete_p);
4649
4650 /* If a const aggregate variable is being initialized, then it
4651 should never be a loss to promote the variable to be static. */
4652 if (valid_const_initializer
4653 && num_nonzero_elements > 1
4654 && TREE_READONLY (object)
4655 && VAR_P (object)
4656 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
4657 {
4658 if (notify_temp_creation)
4659 return GS_ERROR;
4660 DECL_INITIAL (object) = ctor;
4661 TREE_STATIC (object) = 1;
4662 if (!DECL_NAME (object))
4663 DECL_NAME (object) = create_tmp_var_name ("C");
4664 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
4665
4666 /* ??? C++ doesn't automatically append a .<number> to the
4667 assembler name, and even when it does, it looks at FE private
4668 data structures to figure out what that number should be,
4669 which are not set for this variable. I suppose this is
4670 important for local statics for inline functions, which aren't
4671 "local" in the object file sense. So in order to get a unique
4672 TU-local symbol, we must invoke the lhd version now. */
4673 lhd_set_decl_assembler_name (object);
4674
4675 *expr_p = NULL_TREE;
4676 break;
4677 }
4678
4679 /* If there are "lots" of initialized elements, even discounting
4680 those that are not address constants (and thus *must* be
4681 computed at runtime), then partition the constructor into
4682 constant and non-constant parts. Block copy the constant
4683 parts in, then generate code for the non-constant parts. */
4684 /* TODO. There's code in cp/typeck.c to do this. */
4685
4686 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
4687 /* store_constructor will ignore the clearing of variable-sized
4688 objects.
Initializers for such objects must explicitly set 4689 every field that needs to be set. */ 4690 cleared = false; 4691 else if (!complete_p) 4692 /* If the constructor isn't complete, clear the whole object 4693 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it. 4694 4695 ??? This ought not to be needed. For any element not present 4696 in the initializer, we should simply set them to zero. Except 4697 we'd need to *find* the elements that are not present, and that 4698 requires trickery to avoid quadratic compile-time behavior in 4699 large cases or excessive memory use in small cases. */ 4700 cleared = !CONSTRUCTOR_NO_CLEARING (ctor); 4701 else if (num_ctor_elements - num_nonzero_elements 4702 > CLEAR_RATIO (optimize_function_for_speed_p (cfun)) 4703 && num_nonzero_elements < num_ctor_elements / 4) 4704 /* If there are "lots" of zeros, it's more efficient to clear 4705 the memory and then set the nonzero elements. */ 4706 cleared = true; 4707 else 4708 cleared = false; 4709 4710 /* If there are "lots" of initialized elements, and all of them 4711 are valid address constants, then the entire initializer can 4712 be dropped to memory, and then memcpy'd out. Don't do this 4713 for sparse arrays, though, as it's more efficient to follow 4714 the standard CONSTRUCTOR behavior of memset followed by 4715 individual element initialization. Also don't do this for small 4716 all-zero initializers (which aren't big enough to merit 4717 clearing), and don't try to make bitwise copies of 4718 TREE_ADDRESSABLE types. 4719 4720 We cannot apply such transformation when compiling chkp static 4721 initializer because creation of initializer image in the memory 4722 will require static initialization of bounds for it. It should 4723 result in another gimplification of similar initializer and we 4724 may fall into infinite loop. */ 4725 if (valid_const_initializer 4726 && !(cleared || num_nonzero_elements == 0) 4727 && !TREE_ADDRESSABLE (type) 4728 && (!current_function_decl 4729 || !lookup_attribute ("chkp ctor", 4730 DECL_ATTRIBUTES (current_function_decl)))) 4731 { 4732 HOST_WIDE_INT size = int_size_in_bytes (type); 4733 unsigned int align; 4734 4735 /* ??? We can still get unbounded array types, at least 4736 from the C++ front end. This seems wrong, but attempt 4737 to work around it for now. */ 4738 if (size < 0) 4739 { 4740 size = int_size_in_bytes (TREE_TYPE (object)); 4741 if (size >= 0) 4742 TREE_TYPE (ctor) = type = TREE_TYPE (object); 4743 } 4744 4745 /* Find the maximum alignment we can assume for the object. */ 4746 /* ??? Make use of DECL_OFFSET_ALIGN. */ 4747 if (DECL_P (object)) 4748 align = DECL_ALIGN (object); 4749 else 4750 align = TYPE_ALIGN (type); 4751 4752 /* Do a block move either if the size is so small as to make 4753 each individual move a sub-unit move on average, or if it 4754 is so large as to make individual moves inefficient. */ 4755 if (size > 0 4756 && num_nonzero_elements > 1 4757 && (size < num_nonzero_elements 4758 || !can_move_by_pieces (size, align))) 4759 { 4760 if (notify_temp_creation) 4761 return GS_ERROR; 4762 4763 walk_tree (&ctor, force_labels_r, NULL, NULL); 4764 ctor = tree_output_constant_def (ctor); 4765 if (!useless_type_conversion_p (type, TREE_TYPE (ctor))) 4766 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor); 4767 TREE_OPERAND (*expr_p, 1) = ctor; 4768 4769 /* This is no longer an assignment of a CONSTRUCTOR, but 4770 we still may have processing to do on the LHS. So 4771 pretend we didn't do anything here to let that happen. 
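(The net effect is roughly that of 'static const struct S C =
{ ... }; object = C;', i.e. a block copy from an anonymous pooled
constant, with a VIEW_CONVERT_EXPR wrapped around it above when
the types disagree; an informal sketch.)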
*/ 4772 return GS_UNHANDLED; 4773 } 4774 } 4775 4776 /* If the target is volatile, we have non-zero elements and more than 4777 one field to assign, initialize the target from a temporary. */ 4778 if (TREE_THIS_VOLATILE (object) 4779 && !TREE_ADDRESSABLE (type) 4780 && (num_nonzero_elements > 0 || !cleared) 4781 && vec_safe_length (elts) > 1) 4782 { 4783 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type)); 4784 TREE_OPERAND (*expr_p, 0) = temp; 4785 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p), 4786 *expr_p, 4787 build2 (MODIFY_EXPR, void_type_node, 4788 object, temp)); 4789 return GS_OK; 4790 } 4791 4792 if (notify_temp_creation) 4793 return GS_OK; 4794 4795 /* If there are nonzero elements and if needed, pre-evaluate to capture 4796 elements overlapping with the lhs into temporaries. We must do this 4797 before clearing to fetch the values before they are zeroed-out. */ 4798 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR) 4799 { 4800 preeval_data.lhs_base_decl = get_base_address (object); 4801 if (!DECL_P (preeval_data.lhs_base_decl)) 4802 preeval_data.lhs_base_decl = NULL; 4803 preeval_data.lhs_alias_set = get_alias_set (object); 4804 4805 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1), 4806 pre_p, post_p, &preeval_data); 4807 } 4808 4809 bool ctor_has_side_effects_p 4810 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1)); 4811 4812 if (cleared) 4813 { 4814 /* Zap the CONSTRUCTOR element list, which simplifies this case. 4815 Note that we still have to gimplify, in order to handle the 4816 case of variable sized types. Avoid shared tree structures. */ 4817 CONSTRUCTOR_ELTS (ctor) = NULL; 4818 TREE_SIDE_EFFECTS (ctor) = 0; 4819 object = unshare_expr (object); 4820 gimplify_stmt (expr_p, pre_p); 4821 } 4822 4823 /* If we have not block cleared the object, or if there are nonzero 4824 elements in the constructor, or if the constructor has side effects, 4825 add assignments to the individual scalar fields of the object. */ 4826 if (!cleared 4827 || num_nonzero_elements > 0 4828 || ctor_has_side_effects_p) 4829 gimplify_init_ctor_eval (object, elts, pre_p, cleared); 4830 4831 *expr_p = NULL_TREE; 4832 } 4833 break; 4834 4835 case COMPLEX_TYPE: 4836 { 4837 tree r, i; 4838 4839 if (notify_temp_creation) 4840 return GS_OK; 4841 4842 /* Extract the real and imaginary parts out of the ctor. */ 4843 gcc_assert (elts->length () == 2); 4844 r = (*elts)[0].value; 4845 i = (*elts)[1].value; 4846 if (r == NULL || i == NULL) 4847 { 4848 tree zero = build_zero_cst (TREE_TYPE (type)); 4849 if (r == NULL) 4850 r = zero; 4851 if (i == NULL) 4852 i = zero; 4853 } 4854 4855 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to 4856 represent creation of a complex value. */ 4857 if (TREE_CONSTANT (r) && TREE_CONSTANT (i)) 4858 { 4859 ctor = build_complex (type, r, i); 4860 TREE_OPERAND (*expr_p, 1) = ctor; 4861 } 4862 else 4863 { 4864 ctor = build2 (COMPLEX_EXPR, type, r, i); 4865 TREE_OPERAND (*expr_p, 1) = ctor; 4866 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1), 4867 pre_p, 4868 post_p, 4869 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)), 4870 fb_rvalue); 4871 } 4872 } 4873 break; 4874 4875 case VECTOR_TYPE: 4876 { 4877 unsigned HOST_WIDE_INT ix; 4878 constructor_elt *ce; 4879 4880 if (notify_temp_creation) 4881 return GS_OK; 4882 4883 /* Go ahead and simplify constant constructors to VECTOR_CST. 
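E.g. given a GCC vector type (typedef shown for illustration)

   typedef int v4si __attribute__ ((vector_size (16)));

the initializer (v4si){ 1, 2, 3, 4 } folds to a single
VECTOR_CST, while (v4si){ 1, x, 3, 4 } remains a CONSTRUCTOR
whose elements are gimplified one by one below.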
*/ 4884 if (TREE_CONSTANT (ctor)) 4885 { 4886 bool constant_p = true; 4887 tree value; 4888 4889 /* Even when ctor is constant, it might contain non-*_CST 4890 elements, such as addresses or trapping values like 4891 1.0/0.0 - 1.0/0.0. Such expressions don't belong 4892 in VECTOR_CST nodes. */ 4893 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value) 4894 if (!CONSTANT_CLASS_P (value)) 4895 { 4896 constant_p = false; 4897 break; 4898 } 4899 4900 if (constant_p) 4901 { 4902 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts); 4903 break; 4904 } 4905 4906 TREE_CONSTANT (ctor) = 0; 4907 } 4908 4909 /* Vector types use CONSTRUCTOR all the way through gimple 4910 compilation as a general initializer. */ 4911 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce) 4912 { 4913 enum gimplify_status tret; 4914 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val, 4915 fb_rvalue); 4916 if (tret == GS_ERROR) 4917 ret = GS_ERROR; 4918 else if (TREE_STATIC (ctor) 4919 && !initializer_constant_valid_p (ce->value, 4920 TREE_TYPE (ce->value))) 4921 TREE_STATIC (ctor) = 0; 4922 } 4923 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0))) 4924 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p); 4925 } 4926 break; 4927 4928 default: 4929 /* So how did we get a CONSTRUCTOR for a scalar type? */ 4930 gcc_unreachable (); 4931 } 4932 4933 if (ret == GS_ERROR) 4934 return GS_ERROR; 4935 /* If we have gimplified both sides of the initializer but have 4936 not emitted an assignment, do so now. */ 4937 if (*expr_p) 4938 { 4939 tree lhs = TREE_OPERAND (*expr_p, 0); 4940 tree rhs = TREE_OPERAND (*expr_p, 1); 4941 if (want_value && object == lhs) 4942 lhs = unshare_expr (lhs); 4943 gassign *init = gimple_build_assign (lhs, rhs); 4944 gimplify_seq_add_stmt (pre_p, init); 4945 } 4946 if (want_value) 4947 { 4948 *expr_p = object; 4949 return GS_OK; 4950 } 4951 else 4952 { 4953 *expr_p = NULL; 4954 return GS_ALL_DONE; 4955 } 4956 } 4957 4958 /* Given a pointer value OP0, return a simplified version of an 4959 indirection through OP0, or NULL_TREE if no simplification is 4960 possible. This may only be applied to a rhs of an expression. 4961 Note that the resulting type may be different from the type pointed 4962 to in the sense that it is still compatible from the langhooks 4963 point of view. */ 4964 4965 static tree 4966 gimple_fold_indirect_ref_rhs (tree t) 4967 { 4968 return gimple_fold_indirect_ref (t); 4969 } 4970 4971 /* Subroutine of gimplify_modify_expr to do simplifications of 4972 MODIFY_EXPRs based on the code of the RHS. We loop for as long as 4973 something changes. */ 4974 4975 static enum gimplify_status 4976 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p, 4977 gimple_seq *pre_p, gimple_seq *post_p, 4978 bool want_value) 4979 { 4980 enum gimplify_status ret = GS_UNHANDLED; 4981 bool changed; 4982 4983 do 4984 { 4985 changed = false; 4986 switch (TREE_CODE (*from_p)) 4987 { 4988 case VAR_DECL: 4989 /* If we're assigning from a read-only variable initialized with 4990 a constructor, do the direct assignment from the constructor, 4991 but only if neither source nor target are volatile since this 4992 latter assignment might end up being done on a per-field basis. */ 4993 if (DECL_INITIAL (*from_p) 4994 && TREE_READONLY (*from_p) 4995 && !TREE_THIS_VOLATILE (*from_p) 4996 && !TREE_THIS_VOLATILE (*to_p) 4997 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR) 4998 { 4999 tree old_from = *from_p; 5000 enum gimplify_status subret; 5001 5002 /* Move the constructor into the RHS. 
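E.g. for 'static const struct S c = { 1, 2 }; ... x = c;' we try
gimplifying 'x = { 1, 2 }' instead (a sketch), reverting just
below if the constructor would have to go to memory anyway.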
*/
5003 *from_p = unshare_expr (DECL_INITIAL (*from_p));
5004
5005 /* Let's see if gimplify_init_constructor will need to put
5006 it in memory. */
5007 subret = gimplify_init_constructor (expr_p, NULL, NULL,
5008 false, true);
5009 if (subret == GS_ERROR)
5010 {
5011 /* If so, revert the change. */
5012 *from_p = old_from;
5013 }
5014 else
5015 {
5016 ret = GS_OK;
5017 changed = true;
5018 }
5019 }
5020 break;
5021 case INDIRECT_REF:
5022 {
5023 /* If we have code like
5024
5025 *(const A*)(A*)&x
5026
5027 where the type of "x" is a (possibly cv-qualified) variant
5028 of "A", treat the entire expression as identical to "x".
5029 This kind of code arises in C++ when an object is bound
5030 to a const reference, and if "x" is a TARGET_EXPR we want
5031 to take advantage of the optimization below. */
5032 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
5033 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
5034 if (t)
5035 {
5036 if (TREE_THIS_VOLATILE (t) != volatile_p)
5037 {
5038 if (DECL_P (t))
5039 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
5040 build_fold_addr_expr (t));
5041 if (REFERENCE_CLASS_P (t))
5042 TREE_THIS_VOLATILE (t) = volatile_p;
5043 }
5044 *from_p = t;
5045 ret = GS_OK;
5046 changed = true;
5047 }
5048 break;
5049 }
5050
5051 case TARGET_EXPR:
5052 {
5053 /* If we are initializing something from a TARGET_EXPR, strip the
5054 TARGET_EXPR and initialize it directly, if possible. This can't
5055 be done if the initializer is void, since that implies that the
5056 temporary is set in some non-trivial way.
5057
5058 ??? What about code that pulls out the temp and uses it
5059 elsewhere? I think that such code never uses the TARGET_EXPR as
5060 an initializer. If I'm wrong, we'll die because the temp won't
5061 have any RTL. In that case, I guess we'll need to replace
5062 references somehow. */
5063 tree init = TARGET_EXPR_INITIAL (*from_p);
5064
5065 if (init
5066 && !VOID_TYPE_P (TREE_TYPE (init)))
5067 {
5068 *from_p = init;
5069 ret = GS_OK;
5070 changed = true;
5071 }
5072 }
5073 break;
5074
5075 case COMPOUND_EXPR:
5076 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5077 caught. */
5078 gimplify_compound_expr (from_p, pre_p, true);
5079 ret = GS_OK;
5080 changed = true;
5081 break;
5082
5083 case CONSTRUCTOR:
5084 /* If we already made some changes, let the front end have a
5085 crack at this before we break it down. */
5086 if (ret != GS_UNHANDLED)
5087 break;
5088 /* If we're initializing from a CONSTRUCTOR, break this into
5089 individual MODIFY_EXPRs. */
5090 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
5091 false);
5092
5093 case COND_EXPR:
5094 /* If we're assigning to a non-register type, push the assignment
5095 down into the branches. This is mandatory for ADDRESSABLE types,
5096 since we cannot generate temporaries for such, but it saves a
5097 copy in other cases as well. */
5098 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
5099 {
5100 /* This code should mirror the code in gimplify_cond_expr. */
5101 enum tree_code code = TREE_CODE (*expr_p);
5102 tree cond = *from_p;
5103 tree result = *to_p;
5104
5105 ret = gimplify_expr (&result, pre_p, post_p,
5106 is_gimple_lvalue, fb_lvalue);
5107 if (ret != GS_ERROR)
5108 ret = GS_OK;
5109
5110 /* If we are going to write RESULT more than once, clear the
5111 TREE_READONLY flag; otherwise we might incorrectly promote
5112 the variable to static const and initialize it at compile
5113 time in one of the branches.
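For example, an aggregate 'x = p ? a : b;' becomes
'if (p) x = a; else x = b;' here, so 'x' is stored in both arms
and must not be treated as a read-only variable with a single
static initializer.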
*/ 5114 if (VAR_P (result) 5115 && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node 5116 && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node) 5117 TREE_READONLY (result) = 0; 5118 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node) 5119 TREE_OPERAND (cond, 1) 5120 = build2 (code, void_type_node, result, 5121 TREE_OPERAND (cond, 1)); 5122 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node) 5123 TREE_OPERAND (cond, 2) 5124 = build2 (code, void_type_node, unshare_expr (result), 5125 TREE_OPERAND (cond, 2)); 5126 5127 TREE_TYPE (cond) = void_type_node; 5128 recalculate_side_effects (cond); 5129 5130 if (want_value) 5131 { 5132 gimplify_and_add (cond, pre_p); 5133 *expr_p = unshare_expr (result); 5134 } 5135 else 5136 *expr_p = cond; 5137 return ret; 5138 } 5139 break; 5140 5141 case CALL_EXPR: 5142 /* For calls that return in memory, give *to_p as the CALL_EXPR's 5143 return slot so that we don't generate a temporary. */ 5144 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p) 5145 && aggregate_value_p (*from_p, *from_p)) 5146 { 5147 bool use_target; 5148 5149 if (!(rhs_predicate_for (*to_p))(*from_p)) 5150 /* If we need a temporary, *to_p isn't accurate. */ 5151 use_target = false; 5152 /* It's OK to use the return slot directly unless it's an NRV. */ 5153 else if (TREE_CODE (*to_p) == RESULT_DECL 5154 && DECL_NAME (*to_p) == NULL_TREE 5155 && needs_to_live_in_memory (*to_p)) 5156 use_target = true; 5157 else if (is_gimple_reg_type (TREE_TYPE (*to_p)) 5158 || (DECL_P (*to_p) && DECL_REGISTER (*to_p))) 5159 /* Don't force regs into memory. */ 5160 use_target = false; 5161 else if (TREE_CODE (*expr_p) == INIT_EXPR) 5162 /* It's OK to use the target directly if it's being 5163 initialized. */ 5164 use_target = true; 5165 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p))) 5166 != INTEGER_CST) 5167 /* Always use the target and thus RSO for variable-sized types. 5168 GIMPLE cannot deal with a variable-sized assignment 5169 embedded in a call statement. */ 5170 use_target = true; 5171 else if (TREE_CODE (*to_p) != SSA_NAME 5172 && (!is_gimple_variable (*to_p) 5173 || needs_to_live_in_memory (*to_p))) 5174 /* Don't use the original target if it's already addressable; 5175 if its address escapes, and the called function uses the 5176 NRV optimization, a conforming program could see *to_p 5177 change before the called function returns; see c++/19317. 5178 When optimizing, the return_slot pass marks more functions 5179 as safe after we have escape info. */ 5180 use_target = false; 5181 else 5182 use_target = true; 5183 5184 if (use_target) 5185 { 5186 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1; 5187 mark_addressable (*to_p); 5188 } 5189 } 5190 break; 5191 5192 case WITH_SIZE_EXPR: 5193 /* Likewise for calls that return an aggregate of non-constant size, 5194 since we would not be able to generate a temporary at all. */ 5195 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR) 5196 { 5197 *from_p = TREE_OPERAND (*from_p, 0); 5198 /* We don't change ret in this case because the 5199 WITH_SIZE_EXPR might have been added in 5200 gimplify_modify_expr, so returning GS_OK would lead to an 5201 infinite loop. */ 5202 changed = true; 5203 } 5204 break; 5205 5206 /* If we're initializing from a container, push the initialization 5207 inside it. 
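E.g. for the GNU statement expression 'x = ({ int t = f (); t; });'
(an illustrative sketch), voidify_wrapper_expr below rewrites the
wrapper so that the store to 'x' happens at the final 't'.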
*/ 5208 case CLEANUP_POINT_EXPR: 5209 case BIND_EXPR: 5210 case STATEMENT_LIST: 5211 { 5212 tree wrap = *from_p; 5213 tree t; 5214 5215 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval, 5216 fb_lvalue); 5217 if (ret != GS_ERROR) 5218 ret = GS_OK; 5219 5220 t = voidify_wrapper_expr (wrap, *expr_p); 5221 gcc_assert (t == *expr_p); 5222 5223 if (want_value) 5224 { 5225 gimplify_and_add (wrap, pre_p); 5226 *expr_p = unshare_expr (*to_p); 5227 } 5228 else 5229 *expr_p = wrap; 5230 return GS_OK; 5231 } 5232 5233 case COMPOUND_LITERAL_EXPR: 5234 { 5235 tree complit = TREE_OPERAND (*expr_p, 1); 5236 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit); 5237 tree decl = DECL_EXPR_DECL (decl_s); 5238 tree init = DECL_INITIAL (decl); 5239 5240 /* struct T x = (struct T) { 0, 1, 2 } can be optimized 5241 into struct T x = { 0, 1, 2 } if the address of the 5242 compound literal has never been taken. */ 5243 if (!TREE_ADDRESSABLE (complit) 5244 && !TREE_ADDRESSABLE (decl) 5245 && init) 5246 { 5247 *expr_p = copy_node (*expr_p); 5248 TREE_OPERAND (*expr_p, 1) = init; 5249 return GS_OK; 5250 } 5251 } 5252 5253 default: 5254 break; 5255 } 5256 } 5257 while (changed); 5258 5259 return ret; 5260 } 5261 5262 5263 /* Return true if T looks like a valid GIMPLE statement. */ 5264 5265 static bool 5266 is_gimple_stmt (tree t) 5267 { 5268 const enum tree_code code = TREE_CODE (t); 5269 5270 switch (code) 5271 { 5272 case NOP_EXPR: 5273 /* The only valid NOP_EXPR is the empty statement. */ 5274 return IS_EMPTY_STMT (t); 5275 5276 case BIND_EXPR: 5277 case COND_EXPR: 5278 /* These are only valid if they're void. */ 5279 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t)); 5280 5281 case SWITCH_EXPR: 5282 case GOTO_EXPR: 5283 case RETURN_EXPR: 5284 case LABEL_EXPR: 5285 case CASE_LABEL_EXPR: 5286 case TRY_CATCH_EXPR: 5287 case TRY_FINALLY_EXPR: 5288 case EH_FILTER_EXPR: 5289 case CATCH_EXPR: 5290 case ASM_EXPR: 5291 case STATEMENT_LIST: 5292 case OACC_PARALLEL: 5293 case OACC_KERNELS: 5294 case OACC_DATA: 5295 case OACC_HOST_DATA: 5296 case OACC_DECLARE: 5297 case OACC_UPDATE: 5298 case OACC_ENTER_DATA: 5299 case OACC_EXIT_DATA: 5300 case OACC_CACHE: 5301 case OMP_PARALLEL: 5302 case OMP_FOR: 5303 case OMP_SIMD: 5304 case CILK_SIMD: 5305 case OMP_DISTRIBUTE: 5306 case OACC_LOOP: 5307 case OMP_SECTIONS: 5308 case OMP_SECTION: 5309 case OMP_SINGLE: 5310 case OMP_MASTER: 5311 case OMP_TASKGROUP: 5312 case OMP_ORDERED: 5313 case OMP_CRITICAL: 5314 case OMP_TASK: 5315 case OMP_TARGET: 5316 case OMP_TARGET_DATA: 5317 case OMP_TARGET_UPDATE: 5318 case OMP_TARGET_ENTER_DATA: 5319 case OMP_TARGET_EXIT_DATA: 5320 case OMP_TASKLOOP: 5321 case OMP_TEAMS: 5322 /* These are always void. */ 5323 return true; 5324 5325 case CALL_EXPR: 5326 case MODIFY_EXPR: 5327 case PREDICT_EXPR: 5328 /* These are valid regardless of their type. */ 5329 return true; 5330 5331 default: 5332 return false; 5333 } 5334 } 5335 5336 5337 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is 5338 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with 5339 DECL_GIMPLE_REG_P set. 5340 5341 IMPORTANT NOTE: This promotion is performed by introducing a load of the 5342 other, unmodified part of the complex object just before the total store. 5343 As a consequence, if the object is still uninitialized, an undefined value 5344 will be loaded into a register, which may result in a spurious exception 5345 if the register is floating-point and the value happens to be a signaling 5346 NaN for example. 
Then the fully-fledged complex operations lowering pass
5347 and a subsequent DCE pass are necessary in order to fix things up. */
5348
5349 static enum gimplify_status
5350 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
5351 bool want_value)
5352 {
5353 enum tree_code code, ocode;
5354 tree lhs, rhs, new_rhs, other, realpart, imagpart;
5355
5356 lhs = TREE_OPERAND (*expr_p, 0);
5357 rhs = TREE_OPERAND (*expr_p, 1);
5358 code = TREE_CODE (lhs);
5359 lhs = TREE_OPERAND (lhs, 0);
5360
5361 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
5362 other = build1 (ocode, TREE_TYPE (rhs), lhs);
5363 TREE_NO_WARNING (other) = 1;
5364 other = get_formal_tmp_var (other, pre_p);
5365
5366 realpart = code == REALPART_EXPR ? rhs : other;
5367 imagpart = code == REALPART_EXPR ? other : rhs;
5368
5369 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
5370 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
5371 else
5372 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
5373
5374 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
5375 *expr_p = (want_value) ? rhs : NULL_TREE;
5376
5377 return GS_ALL_DONE;
5378 }
5379
5380 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
5381
5382 modify_expr
5383 : varname '=' rhs
5384 | '*' ID '=' rhs
5385
5386 PRE_P points to the list where side effects that must happen before
5387 *EXPR_P should be stored.
5388
5389 POST_P points to the list where side effects that must happen after
5390 *EXPR_P should be stored.
5391
5392 WANT_VALUE is nonzero iff we want to use the value of this expression
5393 in another expression. */
5394
5395 static enum gimplify_status
5396 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
5397 bool want_value)
5398 {
5399 tree *from_p = &TREE_OPERAND (*expr_p, 1);
5400 tree *to_p = &TREE_OPERAND (*expr_p, 0);
5401 enum gimplify_status ret = GS_UNHANDLED;
5402 gimple *assign;
5403 location_t loc = EXPR_LOCATION (*expr_p);
5404 gimple_stmt_iterator gsi;
5405
5406 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
5407 || TREE_CODE (*expr_p) == INIT_EXPR);
5408
5409 /* Trying to simplify a clobber using normal logic doesn't work,
5410 so handle it here. */
5411 if (TREE_CLOBBER_P (*from_p))
5412 {
5413 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5414 if (ret == GS_ERROR)
5415 return ret;
5416 gcc_assert (!want_value
5417 && (VAR_P (*to_p) || TREE_CODE (*to_p) == MEM_REF));
5418 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
5419 *expr_p = NULL;
5420 return GS_ALL_DONE;
5421 }
5422
5423 /* Insert pointer conversions required by the middle-end that are not
5424 required by the frontend. This fixes middle-end type checking for,
5425 for example, gcc.dg/redecl-6.c. */
5426 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
5427 {
5428 STRIP_USELESS_TYPE_CONVERSION (*from_p);
5429 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
5430 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
5431 }
5432
5433 /* See if any simplifications can be done based on what the RHS is. */
5434 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5435 want_value);
5436 if (ret != GS_UNHANDLED)
5437 return ret;
5438
5439 /* For zero-sized types, only gimplify the left hand side and right hand
5440 side as statements and throw away the assignment. Do this after
5441 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
5442 types properly.
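Zero-sized types arise e.g. from GNU C zero-length arrays, as in
'struct S { int a[0]; };', whose objects occupy no storage; such
an assignment moves no data, so only the side effects of its
operands need to survive.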
  if (zero_sized_type (TREE_TYPE (*from_p))
      && !want_value
      /* Don't do this for calls that return addressable types, expand_call
	 relies on those having a lhs.  */
      && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
	   && TREE_CODE (*from_p) == CALL_EXPR))
    {
      gimplify_stmt (from_p, pre_p);
      gimplify_stmt (to_p, pre_p);
      *expr_p = NULL_TREE;
      return GS_ALL_DONE;
    }

  /* If the value being copied is of variable width, compute the length
     of the copy into a WITH_SIZE_EXPR.  Note that we need to do this
     before gimplifying any of the operands so that we can resolve any
     PLACEHOLDER_EXPRs in the size.  Also note that the RTL expander uses
     the size of the expression to be copied, not of the destination, so
     that is what we must do here.  */
  maybe_with_size_expr (from_p);

  /* As a special case, we have to temporarily allow for assignments
     with a CALL_EXPR on the RHS.  Since in GIMPLE a function call is
     a toplevel statement, when gimplifying the GENERIC expression
     MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
     GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.

     Instead, we need to create the tuple GIMPLE_CALL <a, foo>.  To
     prevent gimplify_expr from trying to create a new temporary for
     foo's LHS, we tell it that it should only gimplify until it
     reaches the CALL_EXPR.  On return from gimplify_expr, the newly
     created GIMPLE_CALL <foo> will be the last statement in *PRE_P
     and all we need to do here is set 'a' to be its LHS.  */

  /* Gimplify the RHS first for C++17 and bug 71104.  */
  gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
  ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Then gimplify the LHS.  */
  /* If we gimplified the RHS to a CALL_EXPR and that call may return
     twice we have to make sure to gimplify into non-SSA as otherwise
     the abnormal edge added later will make those defs not dominate
     their uses.
     ??? Technically this applies only to the registers used in the
     resulting non-register *TO_P.  */
  bool saved_into_ssa = gimplify_ctxp->into_ssa;
  if (saved_into_ssa
      && TREE_CODE (*from_p) == CALL_EXPR
      && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
    gimplify_ctxp->into_ssa = false;
  ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  gimplify_ctxp->into_ssa = saved_into_ssa;
  if (ret == GS_ERROR)
    return ret;

  /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
     guess for the predicate was wrong.  */
  gimple_predicate final_pred = rhs_predicate_for (*to_p);
  if (final_pred != initial_pred)
    {
      ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
      if (ret == GS_ERROR)
	return ret;
    }

  /* In case of a va_arg internal fn wrapped in a WITH_SIZE_EXPR, add the
     type size as an argument to the call.  */
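/* For example (an illustrative sketch, not from the sources): a va_arg
   for a variable-sized type arrives here as

     WITH_SIZE_EXPR <VA_ARG (ap, tag, aptag), vlasize>

   and the code below rebuilds the internal call as

     VA_ARG (ap, tag, aptag, vlasize)

   so the size survives into the later va_arg lowering.  */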
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
    {
      tree call = TREE_OPERAND (*from_p, 0);
      tree vlasize = TREE_OPERAND (*from_p, 1);

      if (TREE_CODE (call) == CALL_EXPR
	  && CALL_EXPR_IFN (call) == IFN_VA_ARG)
	{
	  int nargs = call_expr_nargs (call);
	  tree type = TREE_TYPE (call);
	  tree ap = CALL_EXPR_ARG (call, 0);
	  tree tag = CALL_EXPR_ARG (call, 1);
	  tree aptag = CALL_EXPR_ARG (call, 2);
	  tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
						       IFN_VA_ARG, type,
						       nargs + 1, ap, tag,
						       aptag, vlasize);
	  TREE_OPERAND (*from_p, 0) = newcall;
	}
    }

  /* Now see if the above changed *from_p to something we handle specially.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* If we've got a variable sized assignment between two lvalues (i.e. does
     not involve a call), then we can make things a bit more straightforward
     by converting the assignment to memcpy or memset.  */
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
    {
      tree from = TREE_OPERAND (*from_p, 0);
      tree size = TREE_OPERAND (*from_p, 1);

      if (TREE_CODE (from) == CONSTRUCTOR)
	return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);

      if (is_gimple_addressable (from))
	{
	  *from_p = from;
	  return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
						 pre_p);
	}
    }

  /* Transform partial stores to non-addressable complex variables into
     total stores.  This allows us to use real instead of virtual operands
     for these variables, which improves optimization.  */
  if ((TREE_CODE (*to_p) == REALPART_EXPR
       || TREE_CODE (*to_p) == IMAGPART_EXPR)
      && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
    return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);

  /* Try to alleviate the effects of the gimplification creating artificial
     temporaries (see for example is_gimple_reg_rhs) on the debug info, but
     make sure not to create DECL_DEBUG_EXPR links across functions.  */
  if (!gimplify_ctxp->into_ssa
      && VAR_P (*from_p)
      && DECL_IGNORED_P (*from_p)
      && DECL_P (*to_p)
      && !DECL_IGNORED_P (*to_p)
      && decl_function_context (*to_p) == current_function_decl)
    {
      if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
	DECL_NAME (*from_p)
	  = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
      DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
      SET_DECL_DEBUG_EXPR (*from_p, *to_p);
    }

  if (want_value && TREE_THIS_VOLATILE (*to_p))
    *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);

  if (TREE_CODE (*from_p) == CALL_EXPR)
    {
      /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
	 instead of a GIMPLE_ASSIGN.  */
      gcall *call_stmt;
      if (CALL_EXPR_FN (*from_p) == NULL_TREE)
	{
	  /* Gimplify internal functions created in the FEs.  */
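	  /* Internal calls have no CALL_EXPR_FN; gimplify each argument
	     and build a GIMPLE internal call from the resulting
	     vector.  */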
	  int nargs = call_expr_nargs (*from_p), i;
	  enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
	  auto_vec<tree> vargs (nargs);

	  for (i = 0; i < nargs; i++)
	    {
	      gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
			    EXPR_LOCATION (*from_p));
	      vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
	    }
	  call_stmt = gimple_build_call_internal_vec (ifn, vargs);
	  gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
	}
      else
	{
	  tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
	  CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
	  STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
	  tree fndecl = get_callee_fndecl (*from_p);
	  if (fndecl
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
	      && call_expr_nargs (*from_p) == 3)
	    call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
						    CALL_EXPR_ARG (*from_p, 0),
						    CALL_EXPR_ARG (*from_p, 1),
						    CALL_EXPR_ARG (*from_p, 2));
	  else
	    {
	      call_stmt = gimple_build_call_from_tree (*from_p);
	      gimple_call_set_fntype (call_stmt, TREE_TYPE (fnptrtype));
	    }
	}
      notice_special_calls (call_stmt);
      if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
	gimple_call_set_lhs (call_stmt, *to_p);
      else if (TREE_CODE (*to_p) == SSA_NAME)
	/* The above is somewhat premature, avoid ICEing later for an
	   SSA name w/o a definition.  We may have uses in the GIMPLE IL.
	   ??? This doesn't make it a default-def.  */
	SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
      assign = call_stmt;
    }
  else
    {
      assign = gimple_build_assign (*to_p, *from_p);
      gimple_set_location (assign, EXPR_LOCATION (*expr_p));
      if (COMPARISON_CLASS_P (*from_p))
	gimple_set_no_warning (assign, TREE_NO_WARNING (*from_p));
    }

  if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
    {
      /* We should have got an SSA name from the start.  */
      gcc_assert (TREE_CODE (*to_p) == SSA_NAME
		  || ! gimple_in_ssa_p (cfun));
    }

  gimplify_seq_add_stmt (pre_p, assign);
  gsi = gsi_last (*pre_p);
  maybe_fold_stmt (&gsi);

  if (want_value)
    {
      *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
      return GS_OK;
    }
  else
    *expr_p = NULL;

  return GS_ALL_DONE;
}

/* Gimplify a comparison between two variable-sized objects.  Do this
   with a call to BUILT_IN_MEMCMP.  */

static enum gimplify_status
gimplify_variable_sized_compare (tree *expr_p)
{
  location_t loc = EXPR_LOCATION (*expr_p);
  tree op0 = TREE_OPERAND (*expr_p, 0);
  tree op1 = TREE_OPERAND (*expr_p, 1);
  tree t, arg, dest, src, expr;

  arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
  arg = unshare_expr (arg);
  arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
  src = build_fold_addr_expr_loc (loc, op1);
  dest = build_fold_addr_expr_loc (loc, op0);
  t = builtin_decl_implicit (BUILT_IN_MEMCMP);
  t = build_call_expr_loc (loc, t, 3, dest, src, arg);

  expr
    = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
  SET_EXPR_LOCATION (expr, loc);
  *expr_p = expr;

  return GS_OK;
}

/* Gimplify a comparison between two aggregate objects of integral scalar
   mode as a comparison between the bitwise equivalent scalar values.  */
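/* A sketch of the transformation below (GENERIC pseudo code; the 4-byte
   size and SImode are illustrative assumptions).  For two aggregates
   whose TYPE_MODE is an integer mode, an equality test such as

     x == y

   is rewritten as a comparison of the bitwise-equivalent scalars:

     VIEW_CONVERT_EXPR<int>(x) == VIEW_CONVERT_EXPR<int>(y)  */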
static enum gimplify_status
gimplify_scalar_mode_aggregate_compare (tree *expr_p)
{
  location_t loc = EXPR_LOCATION (*expr_p);
  tree op0 = TREE_OPERAND (*expr_p, 0);
  tree op1 = TREE_OPERAND (*expr_p, 1);

  tree type = TREE_TYPE (op0);
  tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);

  op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
  op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);

  *expr_p
    = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);

  return GS_OK;
}

/* Gimplify an expression sequence.  This function gimplifies each
   expression and rewrites the original expression with the last
   expression of the sequence in GIMPLE form.

   PRE_P points to the list where the side effects for all the
   expressions in the sequence will be emitted.

   WANT_VALUE is true when the result of the last COMPOUND_EXPR is used.  */

static enum gimplify_status
gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
{
  tree t = *expr_p;

  do
    {
      tree *sub_p = &TREE_OPERAND (t, 0);

      if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
	gimplify_compound_expr (sub_p, pre_p, false);
      else
	gimplify_stmt (sub_p, pre_p);

      t = TREE_OPERAND (t, 1);
    }
  while (TREE_CODE (t) == COMPOUND_EXPR);

  *expr_p = t;
  if (want_value)
    return GS_OK;
  else
    {
      gimplify_stmt (expr_p, pre_p);
      return GS_ALL_DONE;
    }
}

/* Gimplify a SAVE_EXPR node.  EXPR_P points to the expression to
   gimplify.  After gimplification, EXPR_P will point to a new temporary
   that holds the original value of the SAVE_EXPR node.

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.  */

static enum gimplify_status
gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  enum gimplify_status ret = GS_ALL_DONE;
  tree val;

  gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
  val = TREE_OPERAND (*expr_p, 0);

  /* If the SAVE_EXPR has not been resolved, then evaluate it once.  */
  if (!SAVE_EXPR_RESOLVED_P (*expr_p))
    {
      /* The operand may be a void-valued expression such as SAVE_EXPRs
	 generated by the Java frontend for class initialization.  It is
	 being executed only for its side-effects.  */
      if (TREE_TYPE (val) == void_type_node)
	{
	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			       is_gimple_stmt, fb_none);
	  val = NULL;
	}
      else
	/* The temporary may not be an SSA name as later abnormal and EH
	   control flow may invalidate use/def domination.  When in SSA
	   form then assume there are no such issues and SAVE_EXPRs only
	   appear via GENERIC foldings.  */
	val = get_initialized_tmp_var (val, pre_p, post_p,
				       gimple_in_ssa_p (cfun));

      TREE_OPERAND (*expr_p, 0) = val;
      SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
    }

  *expr_p = val;

  return ret;
}

/* Rewrite the ADDR_EXPR node pointed to by EXPR_P

   unary_expr
	: ...
	| '&' varname
	...

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
   *EXPR_P should be stored.  */
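/* Two illustrative cases handled below (sketches, not from the sources):

     &*p                        folds back to plain p, with any dropped
				cv-qualification conversion re-added;

     &VIEW_CONVERT_EXPR<T>(x)   becomes &x converted to the type of the
				original ADDR_EXPR.

   The default case instead makes the operand addressable and, if
   needed, rebuilds the ADDR_EXPR in canonical form.  */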
static enum gimplify_status
gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr = *expr_p;
  tree op0 = TREE_OPERAND (expr, 0);
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  switch (TREE_CODE (op0))
    {
    case INDIRECT_REF:
    do_indirect_ref:
      /* Check if we are dealing with an expression of the form '&*ptr'.
	 While the front end folds away '&*ptr' into 'ptr', these
	 expressions may be generated internally by the compiler (e.g.,
	 builtins like __builtin_va_end).  */
      /* Caution: the silent array decomposition semantics we allow for
	 ADDR_EXPR means we can't always discard the pair.  */
      /* Gimplification of the ADDR_EXPR operand may drop
	 cv-qualification conversions, so make sure we add them if
	 needed.  */
      {
	tree op00 = TREE_OPERAND (op0, 0);
	tree t_expr = TREE_TYPE (expr);
	tree t_op00 = TREE_TYPE (op00);

	if (!useless_type_conversion_p (t_expr, t_op00))
	  op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
	*expr_p = op00;
	ret = GS_OK;
      }
      break;

    case VIEW_CONVERT_EXPR:
      /* Take the address of our operand and then convert it to the type of
	 this ADDR_EXPR.

	 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
	 all clear.  The impact of this transformation is even less clear.  */

      /* If the operand is a useless conversion, look through it.  Doing so
	 guarantees that the ADDR_EXPR and its operand will remain of the
	 same type.  */
      if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
	op0 = TREE_OPERAND (op0, 0);

      *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
				  build_fold_addr_expr_loc (loc,
							TREE_OPERAND (op0, 0)));
      ret = GS_OK;
      break;

    case MEM_REF:
      if (integer_zerop (TREE_OPERAND (op0, 1)))
	goto do_indirect_ref;

      /* fall through */

    default:
      /* If we see a call to a declared builtin or see its address
	 being taken (we can unify those cases here) then we can mark
	 the builtin for implicit generation by GCC.  */
      if (TREE_CODE (op0) == FUNCTION_DECL
	  && DECL_BUILT_IN_CLASS (op0) == BUILT_IN_NORMAL
	  && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
	set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);

      /* We use fb_either here because the C frontend sometimes takes
	 the address of a call that returns a struct; see
	 gcc.dg/c99-array-lval-1.c.  The gimplifier will correctly make
	 the implied temporary explicit.  */

      /* Make the operand addressable.  */
      ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
			   is_gimple_addressable, fb_either);
      if (ret == GS_ERROR)
	break;

      /* Then mark it.  Beware that it may not be possible to do so directly
	 if a temporary has been created by the gimplification.  */
      prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);

      op0 = TREE_OPERAND (expr, 0);

      /* For various reasons, the gimplification of the expression
	 may have made a new INDIRECT_REF.  */
      if (TREE_CODE (op0) == INDIRECT_REF)
	goto do_indirect_ref;

      mark_addressable (TREE_OPERAND (expr, 0));

      /* The FEs may end up building ADDR_EXPRs early on a decl with
	 an incomplete type.  Re-build ADDR_EXPRs in canonical form
	 here.  */
      if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
	*expr_p = build_fold_addr_expr (op0);

      /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly.  */
      recompute_tree_invariant_for_addr_expr (*expr_p);

      /* If we re-built the ADDR_EXPR add a conversion to the original type
	 if required.  */
      if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
	*expr_p = fold_convert (TREE_TYPE (expr), *expr_p);

      break;
    }

  return ret;
}

/* Gimplify the operands of an ASM_EXPR.  Input operands should be a gimple
   value; output operands should be a gimple lvalue.  */

static enum gimplify_status
gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr;
  int noutputs;
  const char **oconstraints;
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;
  enum gimplify_status ret, tret;
  gasm *stmt;
  vec<tree, va_gc> *inputs;
  vec<tree, va_gc> *outputs;
  vec<tree, va_gc> *clobbers;
  vec<tree, va_gc> *labels;
  tree link_next;

  expr = *expr_p;
  noutputs = list_length (ASM_OUTPUTS (expr));
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  inputs = NULL;
  outputs = NULL;
  clobbers = NULL;
  labels = NULL;

  ret = GS_ALL_DONE;
  link_next = NULL_TREE;
  for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
    {
      bool ok;
      size_t constraint_len;

      link_next = TREE_CHAIN (link);

      oconstraints[i]
	= constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      constraint_len = strlen (constraint);
      if (constraint_len == 0)
	continue;

      ok = parse_output_constraint (&constraint, i, 0, 0,
				    &allows_mem, &allows_reg, &is_inout);
      if (!ok)
	{
	  ret = GS_ERROR;
	  is_inout = false;
	}

      /* If we can't make copies, we can only accept memory.  */
      if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
	{
	  if (allows_mem)
	    allows_reg = 0;
	  else
	    {
	      error ("impossible constraint in %<asm%>");
	      error ("non-memory output %d must stay in memory", i);
	      return GS_ERROR;
	    }
	}

      if (!allows_reg && allows_mem)
	mark_addressable (TREE_VALUE (link));

      tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
			    is_inout ? is_gimple_min_lval : is_gimple_lvalue,
			    fb_lvalue | fb_mayfail);
      if (tret == GS_ERROR)
	{
	  error ("invalid lvalue in asm output %d", i);
	  ret = tret;
	}

      /* If the constraint does not allow memory make sure we gimplify
	 it to a register if it is not already but its base is.  This
	 happens for complex and vector components.  */
      if (!allows_mem)
	{
	  tree op = TREE_VALUE (link);
	  if (!is_gimple_val (op)
	      && is_gimple_reg_type (TREE_TYPE (op))
	      && is_gimple_reg (get_base_address (op)))
	    {
	      tree tem = create_tmp_reg (TREE_TYPE (op));
	      tree ass;
	      if (is_inout)
		{
		  ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
				tem, unshare_expr (op));
		  gimplify_and_add (ass, pre_p);
		}
	      ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
	      gimplify_and_add (ass, post_p);

	      TREE_VALUE (link) = tem;
	      tret = GS_OK;
	    }
	}

      vec_safe_push (outputs, link);
      TREE_CHAIN (link) = NULL_TREE;

      if (is_inout)
	{
	  /* An input/output operand.  To give the optimizers more
	     flexibility, split it into separate input and output
	     operands.  */
	  tree input;
	  /* Buffer big enough to format a 32-bit UINT_MAX into.  */
	  char buf[11];

	  /* Turn the in/out constraint into an output constraint.  */
	  char *p = xstrdup (constraint);
	  p[0] = '=';
	  TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);

	  /* And add a matching input constraint.  */
	  if (allows_reg)
	    {
	      sprintf (buf, "%u", i);

	      /* If there are multiple alternatives in the constraint,
		 handle each of them individually.  Those that allow register
		 will be replaced with operand number, the others will stay
		 unchanged.  */
	      if (strchr (p, ',') != NULL)
		{
		  size_t len = 0, buflen = strlen (buf);
		  char *beg, *end, *str, *dst;

		  for (beg = p + 1;;)
		    {
		      end = strchr (beg, ',');
		      if (end == NULL)
			end = strchr (beg, '\0');
		      if ((size_t) (end - beg) < buflen)
			len += buflen + 1;
		      else
			len += end - beg + 1;
		      if (*end)
			beg = end + 1;
		      else
			break;
		    }

		  str = (char *) alloca (len);
		  for (beg = p + 1, dst = str;;)
		    {
		      const char *tem;
		      bool mem_p, reg_p, inout_p;

		      end = strchr (beg, ',');
		      if (end)
			*end = '\0';
		      beg[-1] = '=';
		      tem = beg - 1;
		      parse_output_constraint (&tem, i, 0, 0,
					       &mem_p, &reg_p, &inout_p);
		      if (dst != str)
			*dst++ = ',';
		      if (reg_p)
			{
			  memcpy (dst, buf, buflen);
			  dst += buflen;
			}
		      else
			{
			  if (end)
			    len = end - beg;
			  else
			    len = strlen (beg);
			  memcpy (dst, beg, len);
			  dst += len;
			}
		      if (end)
			beg = end + 1;
		      else
			break;
		    }
		  *dst = '\0';
		  input = build_string (dst - str, str);
		}
	      else
		input = build_string (strlen (buf), buf);
	    }
	  else
	    input = build_string (constraint_len - 1, constraint + 1);

	  free (p);

	  input = build_tree_list (build_tree_list (NULL_TREE, input),
				   unshare_expr (TREE_VALUE (link)));
	  ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
	}
    }
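  /* At this point every in/out operand has been split.  For example
     (an illustrative asm, not from the testsuite):

       asm ("..." : "+r" (x));

     is now an output "=r" (x) plus the matching input "0" (x) that the
     code above appended to ASM_INPUTS.  */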
  link_next = NULL_TREE;
  for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);

      /* If we can't make copies, we can only accept memory.  */
      if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
	{
	  if (allows_mem)
	    allows_reg = 0;
	  else
	    {
	      error ("impossible constraint in %<asm%>");
	      error ("non-memory input %d must stay in memory", i);
	      return GS_ERROR;
	    }
	}

      /* If the operand is a memory input, it should be an lvalue.  */
      if (!allows_reg && allows_mem)
	{
	  tree inputv = TREE_VALUE (link);
	  STRIP_NOPS (inputv);
	  if (TREE_CODE (inputv) == PREDECREMENT_EXPR
	      || TREE_CODE (inputv) == PREINCREMENT_EXPR
	      || TREE_CODE (inputv) == POSTDECREMENT_EXPR
	      || TREE_CODE (inputv) == POSTINCREMENT_EXPR
	      || TREE_CODE (inputv) == MODIFY_EXPR)
	    TREE_VALUE (link) = error_mark_node;
	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
				is_gimple_lvalue, fb_lvalue | fb_mayfail);
	  if (tret != GS_ERROR)
	    {
	      /* Unlike output operands, memory inputs are not guaranteed
		 to be lvalues by the FE, and while the expressions are
		 marked addressable there, if it is e.g. a statement
		 expression, temporaries in it might not end up being
		 addressable.  They might be already used in the IL and thus
		 it is too late to make them addressable now though.  */
	      tree x = TREE_VALUE (link);
	      while (handled_component_p (x))
		x = TREE_OPERAND (x, 0);
	      if (TREE_CODE (x) == MEM_REF
		  && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
		x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
	      if ((VAR_P (x)
		   || TREE_CODE (x) == PARM_DECL
		   || TREE_CODE (x) == RESULT_DECL)
		  && !TREE_ADDRESSABLE (x)
		  && is_gimple_reg (x))
		{
		  warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
					       input_location), 0,
			      "memory input %d is not directly addressable",
			      i);
		  prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
		}
	    }
	  mark_addressable (TREE_VALUE (link));
	  if (tret == GS_ERROR)
	    {
	      error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
			"memory input %d is not directly addressable", i);
	      ret = tret;
	    }
	}
      else
	{
	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
				is_gimple_asm_val, fb_rvalue);
	  if (tret == GS_ERROR)
	    ret = tret;
	}

      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (inputs, link);
    }

  link_next = NULL_TREE;
  for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (clobbers, link);
    }

  link_next = NULL_TREE;
  for (link = ASM_LABELS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (labels, link);
    }

  /* Do not add ASMs with errors to the gimple IL stream.  */
  if (ret != GS_ERROR)
    {
      stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
				   inputs, outputs, clobbers, labels);

      gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
      gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
      gimple_asm_set_inline (stmt, ASM_INLINE_P (expr));

      gimplify_seq_add_stmt (pre_p, stmt);
    }

  return ret;
}

/* Gimplify a CLEANUP_POINT_EXPR.  Currently this works by adding
   GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
   gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
   return to this function.

   FIXME should we complexify the prequeue handling instead?  Or use flags
   for all the cleanups and let the optimizer tighten them up?  The current
   code seems pretty fragile; it will break on a cleanup within any
   non-conditional nesting.  But any such nesting would be broken, anyway;
   we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
   and continues out of it.  We can do that at the RTL level, though, so
   having an optimizer to tighten up try/finally regions would be a Good
   Thing.  */
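/* A sketch of the rewrite performed below (GIMPLE-like pseudo code;
   STMT1, STMT2 and CLEANUP are placeholders):

     CLEANUP_POINT_EXPR
       STMT1;
       WITH_CLEANUP_EXPR <CLEANUP>;
       STMT2;

   becomes

     STMT1;
     try { STMT2; } finally { CLEANUP; }

   while a WCE that ends the sequence simply has its cleanup spliced in
   place (or dropped entirely if it is EH-only).  */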
static enum gimplify_status
gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
{
  gimple_stmt_iterator iter;
  gimple_seq body_sequence = NULL;

  tree temp = voidify_wrapper_expr (*expr_p, NULL);

  /* We only care about the number of conditions between the innermost
     CLEANUP_POINT_EXPR and the cleanup.  So save and reset the count and
     any cleanups collected outside the CLEANUP_POINT_EXPR.  */
  int old_conds = gimplify_ctxp->conditions;
  gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
  bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
  gimplify_ctxp->conditions = 0;
  gimplify_ctxp->conditional_cleanups = NULL;
  gimplify_ctxp->in_cleanup_point_expr = true;

  gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);

  gimplify_ctxp->conditions = old_conds;
  gimplify_ctxp->conditional_cleanups = old_cleanups;
  gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;

  for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
    {
      gimple *wce = gsi_stmt (iter);

      if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
	{
	  if (gsi_one_before_end_p (iter))
	    {
	      /* Note that gsi_insert_seq_before and gsi_remove do not
		 scan operands, unlike some other sequence mutators.  */
	      if (!gimple_wce_cleanup_eh_only (wce))
		gsi_insert_seq_before_without_update (&iter,
						      gimple_wce_cleanup (wce),
						      GSI_SAME_STMT);
	      gsi_remove (&iter, true);
	      break;
	    }
	  else
	    {
	      gtry *gtry;
	      gimple_seq seq;
	      enum gimple_try_flags kind;

	      if (gimple_wce_cleanup_eh_only (wce))
		kind = GIMPLE_TRY_CATCH;
	      else
		kind = GIMPLE_TRY_FINALLY;
	      seq = gsi_split_seq_after (iter);

	      gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
	      /* Do not use gsi_replace here, as it may scan operands.
		 We want to do a simple structural modification only.  */
	      gsi_set_stmt (&iter, gtry);
	      iter = gsi_start (gtry->eval);
	    }
	}
      else
	gsi_next (&iter);
    }

  gimplify_seq_add_seq (pre_p, body_sequence);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }
  else
    {
      *expr_p = NULL;
      return GS_ALL_DONE;
    }
}

/* Insert a cleanup marker for gimplify_cleanup_point_expr.  CLEANUP
   is the cleanup action required.  EH_ONLY is true if the cleanup should
   only be executed if an exception is thrown, not on normal exit.
   If FORCE_UNCOND is true perform the cleanup unconditionally;  this is
   only valid for clobbers.  */

static void
gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
		     bool force_uncond = false)
{
  gimple *wce;
  gimple_seq cleanup_stmts = NULL;

  /* Errors can result in improperly nested cleanups, which results in
     confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR.  */
  if (seen_error ())
    return;

  if (gimple_conditional_context ())
    {
      /* If we're in a conditional context, this is more complex.
	 We only
	 want to run the cleanup if we actually ran the initialization that
	 necessitates it, but we want to run it after the end of the
	 conditional context.  So we wrap the try/finally around the
	 condition and use a flag to determine whether or not to actually
	 run the destructor.  Thus

	   test ? f(A()) : 0

	 becomes (approximately)

	   flag = 0;
	   try {
	     if (test) { A::A(temp); flag = 1; val = f(temp); }
	     else { val = 0; }
	   } finally {
	     if (flag) A::~A(temp);
	   }
	   val
      */
      if (force_uncond)
	{
	  gimplify_stmt (&cleanup, &cleanup_stmts);
	  wce = gimple_build_wce (cleanup_stmts);
	  gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
	}
      else
	{
	  tree flag = create_tmp_var (boolean_type_node, "cleanup");
	  gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
	  gassign *ftrue = gimple_build_assign (flag, boolean_true_node);

	  cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
	  gimplify_stmt (&cleanup, &cleanup_stmts);
	  wce = gimple_build_wce (cleanup_stmts);

	  gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
	  gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
	  gimplify_seq_add_stmt (pre_p, ftrue);

	  /* Because of this manipulation, and the EH edges that jump
	     threading cannot redirect, the temporary (VAR) will appear
	     to be used uninitialized.  Don't warn.  */
	  TREE_NO_WARNING (var) = 1;
	}
    }
  else
    {
      gimplify_stmt (&cleanup, &cleanup_stmts);
      wce = gimple_build_wce (cleanup_stmts);
      gimple_wce_set_cleanup_eh_only (wce, eh_only);
      gimplify_seq_add_stmt (pre_p, wce);
    }
}

/* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR.  */

static enum gimplify_status
gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree targ = *expr_p;
  tree temp = TARGET_EXPR_SLOT (targ);
  tree init = TARGET_EXPR_INITIAL (targ);
  enum gimplify_status ret;

  bool unpoison_empty_seq = false;
  gimple_stmt_iterator unpoison_it;

  if (init)
    {
      tree cleanup = NULL_TREE;

      /* TARGET_EXPR temps aren't part of the enclosing block, so add it
	 to the temps list.  Handle also variable length TARGET_EXPRs.  */
      if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
	    gimplify_type_sizes (TREE_TYPE (temp), pre_p);
	  gimplify_vla_decl (temp, pre_p);
	}
      else
	{
	  /* Save location where we need to place unpoisoning.  It's possible
	     that a variable will be converted to needs_to_live_in_memory.  */
	  unpoison_it = gsi_last (*pre_p);
	  unpoison_empty_seq = gsi_end_p (unpoison_it);

	  gimple_add_tmp_var (temp);
	}

      /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
	 expression is supposed to initialize the slot.  */
      if (VOID_TYPE_P (TREE_TYPE (init)))
	ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
      else
	{
	  tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
	  init = init_expr;
	  ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
	  init = NULL;
	  ggc_free (init_expr);
	}
      if (ret == GS_ERROR)
	{
	  /* PR c++/28266 Make sure this is expanded only once.  */
	  TARGET_EXPR_INITIAL (targ) = NULL_TREE;
	  return GS_ERROR;
	}
      if (init)
	gimplify_and_add (init, pre_p);

      /* If needed, push the cleanup for the temp.  */
      if (TARGET_EXPR_CLEANUP (targ))
	{
	  if (CLEANUP_EH_ONLY (targ))
	    gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
				 CLEANUP_EH_ONLY (targ), pre_p);
	  else
	    cleanup = TARGET_EXPR_CLEANUP (targ);
	}

      /* Add a clobber for the temporary going out of scope, like
	 gimplify_bind_expr.  */
      if (gimplify_ctxp->in_cleanup_point_expr
	  && needs_to_live_in_memory (temp))
	{
	  if (flag_stack_reuse == SR_ALL)
	    {
	      tree clobber = build_constructor (TREE_TYPE (temp),
						NULL);
	      TREE_THIS_VOLATILE (clobber) = true;
	      clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
	      gimple_push_cleanup (temp, clobber, false, pre_p, true);
	    }
	  if (asan_poisoned_variables && dbg_cnt (asan_use_after_scope)
	      && !gimplify_omp_ctxp)
	    {
	      tree asan_cleanup = build_asan_poison_call_expr (temp);
	      if (asan_cleanup)
		{
		  if (unpoison_empty_seq)
		    unpoison_it = gsi_start (*pre_p);

		  asan_poison_variable (temp, false, &unpoison_it,
					unpoison_empty_seq);
		  gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
		}
	    }
	}
      if (cleanup)
	gimple_push_cleanup (temp, cleanup, false, pre_p);

      /* Only expand this once.  */
      TREE_OPERAND (targ, 3) = init;
      TARGET_EXPR_INITIAL (targ) = NULL_TREE;
    }
  else
    /* We should have expanded this before.  */
    gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));

  *expr_p = temp;
  return GS_OK;
}

/* Gimplification of expression trees.  */

/* Gimplify an expression which appears at statement context.  The
   corresponding GIMPLE statements are added to *SEQ_P.  If *SEQ_P is
   NULL, a new sequence is allocated.

   Return true if we actually added a statement to the queue.  */

bool
gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
{
  gimple_seq_node last;

  last = gimple_seq_last (*seq_p);
  gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
  return last != gimple_seq_last (*seq_p);
}

/* Add FIRSTPRIVATE entries for DECL in the surrounding OpenMP parallels
   to CTX.  If entries already exist, force them to be some flavor of
   private.  If there is no enclosing parallel, do nothing.  */
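/* An illustrative (hypothetical) case: given a VLA

     void f (int n)
     {
       int a[n];
     #pragma omp parallel
       ...
     }

   the artificial temporaries holding the size of 'a' must be made
   firstprivate in the enclosing parallel so every thread can recompute
   the array bounds; that is what this routine arranges.  */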
void
omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
{
  splay_tree_node n;

  if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
    return;

  do
    {
      n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
      if (n != NULL)
	{
	  if (n->value & GOVD_SHARED)
	    n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
	  else if (n->value & GOVD_MAP)
	    n->value |= GOVD_MAP_TO_ONLY;
	  else
	    return;
	}
      else if ((ctx->region_type & ORT_TARGET) != 0)
	{
	  if (ctx->target_map_scalars_firstprivate)
	    omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
	  else
	    omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
	}
      else if (ctx->region_type != ORT_WORKSHARE
	       && ctx->region_type != ORT_SIMD
	       && ctx->region_type != ORT_ACC
	       && !(ctx->region_type & ORT_TARGET_DATA))
	omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);

      ctx = ctx->outer_context;
    }
  while (ctx);
}

/* Similarly for each of the type sizes of TYPE.  */

static void
omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
{
  if (type == NULL || type == error_mark_node)
    return;
  type = TYPE_MAIN_VARIANT (type);

  if (ctx->privatized_types->add (type))
    return;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
      omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
      break;

    case ARRAY_TYPE:
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
      omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree field;
	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	  if (TREE_CODE (field) == FIELD_DECL)
	    {
	      omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
	      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
	    }
      }
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
      break;

    default:
      break;
    }

  omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
  omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
  lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
}

/* Add an entry for DECL in the OMP context CTX with FLAGS.  */

static void
omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
{
  splay_tree_node n;
  unsigned int nflags;
  tree t;

  if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
    return;

  /* Never elide decls whose type has TREE_ADDRESSABLE set.  This means
     there are constructors involved somewhere.  Exception is a shared clause,
     there is nothing privatized in that case.  */
  if ((flags & GOVD_SHARED) == 0
      && (TREE_ADDRESSABLE (TREE_TYPE (decl))
	  || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
    flags |= GOVD_SEEN;

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
    {
      /* We shouldn't be re-adding the decl with the same data
	 sharing class.  */
      gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
      nflags = n->value | flags;
      /* The only combination of data sharing classes we should see is
	 FIRSTPRIVATE and LASTPRIVATE.  However, OpenACC permits
	 reduction variables to be used in data sharing clauses.  */
      gcc_assert ((ctx->region_type & ORT_ACC) != 0
		  || ((nflags & GOVD_DATA_SHARE_CLASS)
		      == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
		  || (flags & GOVD_DATA_SHARE_CLASS) == 0);
      n->value = nflags;
      return;
    }

  /* When adding a variable-sized variable, we have to handle all sorts
     of additional bits of data: the pointer replacement variable, and
     the parameters of the type.  */
  if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
    {
      /* Add the pointer replacement variable as PRIVATE if the variable
	 replacement is private, else FIRSTPRIVATE since we'll need the
	 address of the original variable either for SHARED, or for the
	 copy into or out of the context.  */
      if (!(flags & GOVD_LOCAL))
	{
	  if (flags & GOVD_MAP)
	    nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
	  else if (flags & GOVD_PRIVATE)
	    nflags = GOVD_PRIVATE;
	  else if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
		   && (flags & GOVD_FIRSTPRIVATE))
	    nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
	  else
	    nflags = GOVD_FIRSTPRIVATE;
	  nflags |= flags & GOVD_SEEN;
	  t = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (t) == INDIRECT_REF);
	  t = TREE_OPERAND (t, 0);
	  gcc_assert (DECL_P (t));
	  omp_add_variable (ctx, t, nflags);
	}

      /* Add all of the variable and type parameters (which should have
	 been gimplified to a formal temporary) as FIRSTPRIVATE.  */
      omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
      omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* The variable-sized variable itself is never SHARED, only some form
	 of PRIVATE.  The sharing would take place via the pointer variable
	 which we remapped above.  */
      if (flags & GOVD_SHARED)
	flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
		| (flags & (GOVD_SEEN | GOVD_EXPLICIT));

      /* We're going to make use of the TYPE_SIZE_UNIT at least in the
	 alloca statement we generate for the variable, so make sure it
	 is available.  This isn't automatically needed for the SHARED
	 case, since we won't be allocating local storage then.
	 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
	 in this case omp_notice_variable will be called later
	 on when it is gimplified.  */
      else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
	       && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
	omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
    }
  else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
	   && lang_hooks.decls.omp_privatize_by_reference (decl))
    {
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* Similar to the direct variable sized case above, we'll need the
	 size of references being privatized.  */
      if ((flags & GOVD_SHARED) == 0)
	{
	  t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
	  if (DECL_P (t))
	    omp_notice_variable (ctx, t, true);
	}
    }

  if (n != NULL)
    n->value |= flags;
  else
    splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);

  /* For reductions clauses in OpenACC loop directives, by default create a
     copy clause on the enclosing parallel construct for carrying back the
     results.  */
  if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
    {
      struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
      while (outer_ctx)
	{
	  n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
	  if (n != NULL)
	    {
	      /* Ignore local variables and explicitly declared clauses.  */
	      if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
		break;
	      else if (outer_ctx->region_type == ORT_ACC_KERNELS)
		{
		  /* According to the OpenACC spec, such a reduction variable
		     should already have a copy map on a kernels construct,
		     verify that here.  */
		  gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
			      && (n->value & GOVD_MAP));
		}
	      else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
		{
		  /* Remove firstprivate and make it a copy map.  */
		  n->value &= ~GOVD_FIRSTPRIVATE;
		  n->value |= GOVD_MAP;
		}
	    }
	  else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
	    {
	      splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
				 GOVD_MAP | GOVD_SEEN);
	      break;
	    }
	  outer_ctx = outer_ctx->outer_context;
	}
    }
}

/* Notice a threadprivate variable DECL used in OMP context CTX.
   This just prints out diagnostics about threadprivate variable uses
   in untied tasks.  If DECL2 is non-NULL, prevent this warning
   on that variable.  */

static bool
omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
				   tree decl2)
{
  splay_tree_node n;
  struct gimplify_omp_ctx *octx;

  for (octx = ctx; octx; octx = octx->outer_context)
    if ((octx->region_type & ORT_TARGET) != 0)
      {
	n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
	if (n == NULL)
	  {
	    error ("threadprivate variable %qE used in target region",
		   DECL_NAME (decl));
	    error_at (octx->location, "enclosing target region");
	    splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
	  }
	if (decl2)
	  splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
      }

  if (ctx->region_type != ORT_UNTIED_TASK)
    return false;
  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n == NULL)
    {
      error ("threadprivate variable %qE used in untied task",
	     DECL_NAME (decl));
      error_at (ctx->location, "enclosing task");
      splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
    }
  if (decl2)
    splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
  return false;
}

/* Return true if global var DECL is device resident.  */
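/* For instance (an illustrative OpenACC declaration, not from the
   testsuite):

     int v;
     #pragma acc declare device_resident(v)

   attaches an "oacc declare target" attribute whose clause chain
   contains a GOMP_MAP_DEVICE_RESIDENT mapping, which is what the loop
   below scans for.  */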
static bool
device_resident_p (tree decl)
{
  tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));

  if (!attr)
    return false;

  for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
    {
      tree c = TREE_VALUE (t);
      if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
	return true;
    }

  return false;
}

/* Return true if DECL has an ACC DECLARE attribute.  */

static bool
is_oacc_declared (tree decl)
{
  tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
  tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
  return declared != NULL_TREE;
}

/* Determine outer default flags for DECL mentioned in an OMP region
   but not declared in an enclosing clause.

   ??? Some compiler-generated variables (like SAVE_EXPRs) could be
   remapped firstprivate instead of shared.  To some extent this is
   addressed in omp_firstprivatize_type_sizes, but not
   effectively.  */

static unsigned
omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
		    bool in_code, unsigned flags)
{
  enum omp_clause_default_kind default_kind = ctx->default_kind;
  enum omp_clause_default_kind kind;

  kind = lang_hooks.decls.omp_predetermined_sharing (decl);
  if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    default_kind = kind;

  switch (default_kind)
    {
    case OMP_CLAUSE_DEFAULT_NONE:
      {
	const char *rtype;

	if (ctx->region_type & ORT_PARALLEL)
	  rtype = "parallel";
	else if (ctx->region_type & ORT_TASK)
	  rtype = "task";
	else if (ctx->region_type & ORT_TEAMS)
	  rtype = "teams";
	else
	  gcc_unreachable ();

	error ("%qE not specified in enclosing %qs",
	       DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
	error_at (ctx->location, "enclosing %qs", rtype);
      }
      /* FALLTHRU */
    case OMP_CLAUSE_DEFAULT_SHARED:
      flags |= GOVD_SHARED;
      break;
    case OMP_CLAUSE_DEFAULT_PRIVATE:
      flags |= GOVD_PRIVATE;
      break;
    case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
      flags |= GOVD_FIRSTPRIVATE;
      break;
    case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
      /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED.  */
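      /* Roughly the OpenMP rule for a task without a default clause:
	 a variable that is shared in all enclosing contexts stays
	 shared, anything else becomes firstprivate; the scan over the
	 outer contexts below implements this.  */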
      gcc_assert ((ctx->region_type & ORT_TASK) != 0);
      if (struct gimplify_omp_ctx *octx = ctx->outer_context)
	{
	  omp_notice_variable (octx, decl, in_code);
	  for (; octx; octx = octx->outer_context)
	    {
	      splay_tree_node n2;

	      n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
		  && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
		continue;
	      if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
		{
		  flags |= GOVD_FIRSTPRIVATE;
		  goto found_outer;
		}
	      if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
		{
		  flags |= GOVD_SHARED;
		  goto found_outer;
		}
	    }
	}

      if (TREE_CODE (decl) == PARM_DECL
	  || (!is_global_var (decl)
	      && DECL_CONTEXT (decl) == current_function_decl))
	flags |= GOVD_FIRSTPRIVATE;
      else
	flags |= GOVD_SHARED;
    found_outer:
      break;

    default:
      gcc_unreachable ();
    }

  return flags;
}


/* Determine outer default flags for DECL mentioned in an OACC region
   but not declared in an enclosing clause.  */

static unsigned
oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
{
  const char *rkind;
  bool on_device = false;
  bool declared = is_oacc_declared (decl);
  tree type = TREE_TYPE (decl);

  if (lang_hooks.decls.omp_privatize_by_reference (decl))
    type = TREE_TYPE (type);

  if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
      && is_global_var (decl)
      && device_resident_p (decl))
    {
      on_device = true;
      flags |= GOVD_MAP_TO_ONLY;
    }

  switch (ctx->region_type)
    {
    default:
      gcc_unreachable ();

    case ORT_ACC_KERNELS:
      /* Scalars are default 'copy' under kernels, non-scalars are default
	 'present_or_copy'.  */
      flags |= GOVD_MAP;
      if (!AGGREGATE_TYPE_P (type))
	flags |= GOVD_MAP_FORCE;

      rkind = "kernels";
      break;

    case ORT_ACC_PARALLEL:
      {
	if (on_device || AGGREGATE_TYPE_P (type) || declared)
	  /* Aggregates default to 'present_or_copy'.  */
	  flags |= GOVD_MAP;
	else
	  /* Scalars default to 'firstprivate'.  */
	  flags |= GOVD_FIRSTPRIVATE;
	rkind = "parallel";
      }
      break;
    }

  if (DECL_ARTIFICIAL (decl))
    ; /* We can get compiler-generated decls, and should not complain
	 about them.  */
  else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
    {
      error ("%qE not specified in enclosing OpenACC %qs construct",
	     DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
      inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
    }
  else
    gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);

  return flags;
}

/* Record the fact that DECL was used within the OMP context CTX.
   IN_CODE is true when real code uses DECL, and false when we should
   merely emit default(none) errors.  Return true if DECL is going to
   be remapped and thus DECL shouldn't be gimplified into its
   DECL_VALUE_EXPR (if any).  */

static bool
omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
{
  splay_tree_node n;
  unsigned flags = in_code ? GOVD_SEEN : 0;
  bool ret = false, shared;

  if (error_operand_p (decl))
    return false;

  if (ctx->region_type == ORT_NONE)
    return lang_hooks.decls.omp_disregard_value_expr (decl, false);

  if (is_global_var (decl))
    {
      /* Threadprivate variables are predetermined.  */
      if (DECL_THREAD_LOCAL_P (decl))
	return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);

      if (DECL_HAS_VALUE_EXPR_P (decl))
	{
	  tree value = get_base_address (DECL_VALUE_EXPR (decl));

	  if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
	    return omp_notice_threadprivate_variable (ctx, decl, value);
	}

      if (gimplify_omp_ctxp->outer_context == NULL
	  && VAR_P (decl)
	  && oacc_get_fn_attrib (current_function_decl))
	{
	  location_t loc = DECL_SOURCE_LOCATION (decl);

	  if (lookup_attribute ("omp declare target link",
				DECL_ATTRIBUTES (decl)))
	    {
	      error_at (loc,
			"%qE with %<link%> clause used in %<routine%> function",
			DECL_NAME (decl));
	      return false;
	    }
	  else if (!lookup_attribute ("omp declare target",
				      DECL_ATTRIBUTES (decl)))
	    {
	      error_at (loc,
			"%qE requires a %<declare%> directive for use "
			"in a %<routine%> function", DECL_NAME (decl));
	      return false;
	    }
	}
    }

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if ((ctx->region_type & ORT_TARGET) != 0)
    {
      ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
      if (n == NULL)
	{
	  unsigned nflags = flags;
	  if (ctx->target_map_pointers_as_0len_arrays
	      || ctx->target_map_scalars_firstprivate)
	    {
	      bool is_declare_target = false;
	      bool is_scalar = false;
	      if (is_global_var (decl)
		  && varpool_node::get_create (decl)->offloadable)
		{
		  struct gimplify_omp_ctx *octx;
		  for (octx = ctx->outer_context;
		       octx; octx = octx->outer_context)
		    {
		      n = splay_tree_lookup (octx->variables,
					     (splay_tree_key)decl);
		      if (n
			  && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
			  && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
			break;
		    }
		  is_declare_target = octx == NULL;
		}
	      if (!is_declare_target && ctx->target_map_scalars_firstprivate)
		is_scalar = lang_hooks.decls.omp_scalar_p (decl);
	      if (is_declare_target)
		;
	      else if (ctx->target_map_pointers_as_0len_arrays
		       && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
			   || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
			       && TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
				  == POINTER_TYPE)))
		nflags |= GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
	      else if (is_scalar)
		nflags |= GOVD_FIRSTPRIVATE;
	    }

	  struct gimplify_omp_ctx *octx = ctx->outer_context;
	  if ((ctx->region_type & ORT_ACC) && octx)
	    {
	      /* Look in outer OpenACC contexts, to see if there's a
		 data attribute for this variable.  */
	      omp_notice_variable (octx, decl, in_code);

	      for (; octx; octx = octx->outer_context)
		{
		  if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
		    break;
		  splay_tree_node n2
		    = splay_tree_lookup (octx->variables,
					 (splay_tree_key) decl);
		  if (n2)
		    {
		      if (octx->region_type == ORT_ACC_HOST_DATA)
			error ("variable %qE declared in enclosing "
			       "%<host_data%> region", DECL_NAME (decl));
		      nflags |= GOVD_MAP;
		      if (octx->region_type == ORT_ACC_DATA
			  && (n2->value & GOVD_MAP_0LEN_ARRAY))
			nflags |= GOVD_MAP_0LEN_ARRAY;
		      goto found_outer;
		    }
		}
	    }

	  {
	    tree type = TREE_TYPE (decl);

	    if (nflags == flags
		&& gimplify_omp_ctxp->target_firstprivatize_array_bases
		&& lang_hooks.decls.omp_privatize_by_reference (decl))
	      type = TREE_TYPE (type);
	    if (nflags == flags
		&& !lang_hooks.types.omp_mappable_type (type))
	      {
		error ("%qD referenced in target region does not have "
		       "a mappable type", decl);
		nflags |= GOVD_MAP | GOVD_EXPLICIT;
	      }
	    else if (nflags == flags)
	      {
		if ((ctx->region_type & ORT_ACC) != 0)
		  nflags = oacc_default_clause (ctx, decl, flags);
		else
		  nflags |= GOVD_MAP;
	      }
	  }
	found_outer:
	  omp_add_variable (ctx, decl, nflags);
	}
      else
	{
	  /* If nothing changed, there's nothing left to do.  */
	  if ((n->value & flags) == flags)
	    return ret;
	  flags |= n->value;
	  n->value = flags;
	}
      goto do_outer;
    }

  if (n == NULL)
    {
      if (ctx->region_type == ORT_WORKSHARE
	  || ctx->region_type == ORT_SIMD
	  || ctx->region_type == ORT_ACC
	  || (ctx->region_type & ORT_TARGET_DATA) != 0)
	goto do_outer;

      flags = omp_default_clause (ctx, decl, in_code, flags);

      if ((flags & GOVD_PRIVATE)
	  && lang_hooks.decls.omp_private_outer_ref (decl))
	flags |= GOVD_PRIVATE_OUTER_REF;

      omp_add_variable (ctx, decl, flags);

      shared = (flags & GOVD_SHARED) != 0;
      ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
      goto do_outer;
    }

  if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
      && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
      && DECL_SIZE (decl))
    {
      if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	{
	  splay_tree_node n2;
	  tree t = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (t) == INDIRECT_REF);
	  t = TREE_OPERAND (t, 0);
	  gcc_assert (DECL_P (t));
	  n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
	  n2->value |= GOVD_SEEN;
	}
      else if (lang_hooks.decls.omp_privatize_by_reference (decl)
	       && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
	       && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
		   != INTEGER_CST))
	{
	  splay_tree_node n2;
	  tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
	  gcc_assert (DECL_P (t));
	  n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
	  if (n2)
	    omp_notice_variable (ctx, t, true);
	}
    }

  shared = ((flags | n->value) & GOVD_SHARED) != 0;
  ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);

  /* If nothing changed, there's nothing left to do.  */
  if ((n->value & flags) == flags)
    return ret;
  flags |= n->value;
  n->value = flags;

 do_outer:
  /* If the variable is private in the current context, then we don't
     need to propagate anything to an outer context.  */
 do_outer:
  /* If the variable is private in the current context, then we don't
     need to propagate anything to an outer context.  */
  if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
    return ret;
  if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
      == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
    return ret;
  if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
		| GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
      == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
    return ret;
  if (ctx->outer_context
      && omp_notice_variable (ctx->outer_context, decl, in_code))
    return true;
  return ret;
}

/* Verify that DECL is private within CTX.  If there's specific information
   to the contrary in the innermost scope, generate an error.  */

static bool
omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
{
  splay_tree_node n;

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n != NULL)
    {
      if (n->value & GOVD_SHARED)
	{
	  if (ctx == gimplify_omp_ctxp)
	    {
	      if (simd)
		error ("iteration variable %qE is predetermined linear",
		       DECL_NAME (decl));
	      else
		error ("iteration variable %qE should be private",
		       DECL_NAME (decl));
	      n->value = GOVD_PRIVATE;
	      return true;
	    }
	  else
	    return false;
	}
      else if ((n->value & GOVD_EXPLICIT) != 0
	       && (ctx == gimplify_omp_ctxp
		   || (ctx->region_type == ORT_COMBINED_PARALLEL
		       && gimplify_omp_ctxp->outer_context == ctx)))
	{
	  if ((n->value & GOVD_FIRSTPRIVATE) != 0)
	    error ("iteration variable %qE should not be firstprivate",
		   DECL_NAME (decl));
	  else if ((n->value & GOVD_REDUCTION) != 0)
	    error ("iteration variable %qE should not be reduction",
		   DECL_NAME (decl));
	  else if (simd == 0 && (n->value & GOVD_LINEAR) != 0)
	    error ("iteration variable %qE should not be linear",
		   DECL_NAME (decl));
	  else if (simd == 1 && (n->value & GOVD_LASTPRIVATE) != 0)
	    error ("iteration variable %qE should not be lastprivate",
		   DECL_NAME (decl));
	  else if (simd && (n->value & GOVD_PRIVATE) != 0)
	    error ("iteration variable %qE should not be private",
		   DECL_NAME (decl));
	  else if (simd == 2 && (n->value & GOVD_LINEAR) != 0)
	    error ("iteration variable %qE is predetermined linear",
		   DECL_NAME (decl));
	}
      return (ctx == gimplify_omp_ctxp
	      || (ctx->region_type == ORT_COMBINED_PARALLEL
		  && gimplify_omp_ctxp->outer_context == ctx));
    }

  if (ctx->region_type != ORT_WORKSHARE
      && ctx->region_type != ORT_SIMD
      && ctx->region_type != ORT_ACC)
    return false;
  else if (ctx->outer_context)
    return omp_is_private (ctx->outer_context, decl, simd);
  return false;
}
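/* E.g. an OMP_FOR iteration variable that an explicit clause made
   GOVD_SHARED in the construct's own context is diagnosed above and
   then forced private so that compilation can continue.  */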
/* Return true if DECL is private within a parallel region
   that binds to the current construct's context or in parallel
   region's REDUCTION clause.  */

static bool
omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
{
  splay_tree_node n;

  do
    {
      ctx = ctx->outer_context;
      if (ctx == NULL)
	{
	  if (is_global_var (decl))
	    return false;

	  /* References might be private, but might be shared too; when
	     checking for copyprivate, assume they might be private,
	     otherwise assume they might be shared.  */
	  if (copyprivate)
	    return true;

	  if (lang_hooks.decls.omp_privatize_by_reference (decl))
	    return false;

	  /* Treat C++ privatized non-static data members outside
	     of the privatization the same.  */
	  if (omp_member_access_dummy_var (decl))
	    return false;

	  return true;
	}

      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);

      if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
	  && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
	continue;

      if (n != NULL)
	{
	  if ((n->value & GOVD_LOCAL) != 0
	      && omp_member_access_dummy_var (decl))
	    return false;
	  return (n->value & GOVD_SHARED) == 0;
	}
    }
  while (ctx->region_type == ORT_WORKSHARE
	 || ctx->region_type == ORT_SIMD
	 || ctx->region_type == ORT_ACC);
  return false;
}

/* Callback for walk_tree to find a DECL_EXPR for the given DECL.  */

static tree
find_decl_expr (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;

  /* If this is the DECL_EXPR for the decl we are looking for, return it.  */
  if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
    return t;

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Scan the OMP clauses in *LIST_P, installing mappings into a new
   omp context and noticing variables in the enclosing ones.  */

static void
gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
			   enum omp_region_type region_type,
			   enum tree_code code)
{
  struct gimplify_omp_ctx *ctx, *outer_ctx;
  tree c;
  hash_map<tree, tree> *struct_map_to_clause = NULL;
  tree *prev_list_p = NULL;

  ctx = new_omp_context (region_type);
  outer_ctx = ctx->outer_context;
  if (code == OMP_TARGET)
    {
      if (!lang_GNU_Fortran ())
	ctx->target_map_pointers_as_0len_arrays = true;
      ctx->target_map_scalars_firstprivate = true;
    }
  if (!lang_GNU_Fortran ())
    switch (code)
      {
      case OMP_TARGET:
      case OMP_TARGET_DATA:
      case OMP_TARGET_ENTER_DATA:
      case OMP_TARGET_EXIT_DATA:
      case OACC_DECLARE:
      case OACC_HOST_DATA:
	ctx->target_firstprivatize_array_bases = true;
      default:
	break;
      }

  while ((c = *list_p) != NULL)
    {
      bool remove = false;
      bool notice_outer = true;
      const char *check_non_private = NULL;
      unsigned int flags;
      tree decl;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  flags = GOVD_PRIVATE | GOVD_EXPLICIT;
	  if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
	    {
	      flags |= GOVD_PRIVATE_OUTER_REF;
	      OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
	    }
	  else
	    notice_outer = false;
	  goto do_add;
	case OMP_CLAUSE_SHARED:
	  flags = GOVD_SHARED | GOVD_EXPLICIT;
	  goto do_add;
	case OMP_CLAUSE_FIRSTPRIVATE:
	  flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
	  check_non_private = "firstprivate";
	  goto do_add;
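	/* On combined constructs a lastprivate variable may additionally
	   have to be registered as shared or lastprivate on one of the
	   enclosing contexts the construct was split into; the cascade
	   of checks below handles those combinations.  */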
	case OMP_CLAUSE_LASTPRIVATE:
	  flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
	  check_non_private = "lastprivate";
	  decl = OMP_CLAUSE_DECL (c);
	  if (error_operand_p (decl))
	    goto do_add;
	  else if (outer_ctx
		   && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
		       || outer_ctx->region_type == ORT_COMBINED_TEAMS)
		   && splay_tree_lookup (outer_ctx->variables,
					 (splay_tree_key) decl) == NULL)
	    {
	      omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN);
	      if (outer_ctx->outer_context)
		omp_notice_variable (outer_ctx->outer_context, decl, true);
	    }
	  else if (outer_ctx
		   && (outer_ctx->region_type & ORT_TASK) != 0
		   && outer_ctx->combined_loop
		   && splay_tree_lookup (outer_ctx->variables,
					 (splay_tree_key) decl) == NULL)
	    {
	      omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
	      if (outer_ctx->outer_context)
		omp_notice_variable (outer_ctx->outer_context, decl, true);
	    }
	  else if (outer_ctx
		   && (outer_ctx->region_type == ORT_WORKSHARE
		       || outer_ctx->region_type == ORT_ACC)
		   && outer_ctx->combined_loop
		   && splay_tree_lookup (outer_ctx->variables,
					 (splay_tree_key) decl) == NULL
		   && !omp_check_private (outer_ctx, decl, false))
	    {
	      omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
	      if (outer_ctx->outer_context
		  && (outer_ctx->outer_context->region_type
		      == ORT_COMBINED_PARALLEL)
		  && splay_tree_lookup (outer_ctx->outer_context->variables,
					(splay_tree_key) decl) == NULL)
		{
		  struct gimplify_omp_ctx *octx = outer_ctx->outer_context;
		  omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
		  if (octx->outer_context)
		    {
		      octx = octx->outer_context;
		      if (octx->region_type == ORT_WORKSHARE
			  && octx->combined_loop
			  && splay_tree_lookup (octx->variables,
						(splay_tree_key) decl) == NULL
			  && !omp_check_private (octx, decl, false))
			{
			  omp_add_variable (octx, decl,
					    GOVD_LASTPRIVATE | GOVD_SEEN);
			  octx = octx->outer_context;
			  if (octx
			      && octx->region_type == ORT_COMBINED_TEAMS
			      && (splay_tree_lookup (octx->variables,
						     (splay_tree_key) decl)
				  == NULL))
			    {
			      omp_add_variable (octx, decl,
						GOVD_SHARED | GOVD_SEEN);
			      octx = octx->outer_context;
			    }
			}
		      if (octx)
			omp_notice_variable (octx, decl, true);
		    }
		}
	      else if (outer_ctx->outer_context)
		omp_notice_variable (outer_ctx->outer_context, decl, true);
	    }
	  goto do_add;
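	/* Reductions on array sections arrive here with a MEM_REF (rather
	   than a plain decl) as OMP_CLAUSE_DECL; the length and offset
	   expressions embedded in it are gimplified below before the
	   expression is peeled down to its base decl.  */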
	case OMP_CLAUSE_REDUCTION:
	  flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
	  /* OpenACC permits reductions on private variables.  */
	  if (!(region_type & ORT_ACC))
	    check_non_private = "reduction";
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) == MEM_REF)
	    {
	      tree type = TREE_TYPE (decl);
	      if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
				 NULL, is_gimple_val, fb_rvalue, false)
		  == GS_ERROR)
		{
		  remove = true;
		  break;
		}
	      tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	      if (DECL_P (v))
		{
		  omp_firstprivatize_variable (ctx, v);
		  omp_notice_variable (ctx, v, true);
		}
	      decl = TREE_OPERAND (decl, 0);
	      if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
		{
		  if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
				     NULL, is_gimple_val, fb_rvalue, false)
		      == GS_ERROR)
		    {
		      remove = true;
		      break;
		    }
		  v = TREE_OPERAND (decl, 1);
		  if (DECL_P (v))
		    {
		      omp_firstprivatize_variable (ctx, v);
		      omp_notice_variable (ctx, v, true);
		    }
		  decl = TREE_OPERAND (decl, 0);
		}
	      if (TREE_CODE (decl) == ADDR_EXPR
		  || TREE_CODE (decl) == INDIRECT_REF)
		decl = TREE_OPERAND (decl, 0);
	    }
	  goto do_add_decl;
	case OMP_CLAUSE_LINEAR:
	  if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
			     is_gimple_val, fb_rvalue) == GS_ERROR)
	    {
	      remove = true;
	      break;
	    }
	  else
	    {
	      if (code == OMP_SIMD
		  && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
		{
		  struct gimplify_omp_ctx *octx = outer_ctx;
		  if (octx
		      && octx->region_type == ORT_WORKSHARE
		      && octx->combined_loop
		      && !octx->distribute)
		    {
		      if (octx->outer_context
			  && (octx->outer_context->region_type
			      == ORT_COMBINED_PARALLEL))
			octx = octx->outer_context->outer_context;
		      else
			octx = octx->outer_context;
		    }
		  if (octx
		      && octx->region_type == ORT_WORKSHARE
		      && octx->combined_loop
		      && octx->distribute)
		    {
		      error_at (OMP_CLAUSE_LOCATION (c),
				"%<linear%> clause for variable other than "
				"loop iterator specified on construct "
				"combined with %<distribute%>");
		      remove = true;
		      break;
		    }
		}
	      /* For combined #pragma omp parallel for simd, need to put
		 lastprivate and perhaps firstprivate too on the
		 parallel.  Similarly for #pragma omp for simd.  */
	      struct gimplify_omp_ctx *octx = outer_ctx;
	      decl = NULL_TREE;
	      do
		{
		  if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
		      && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
		    break;
		  decl = OMP_CLAUSE_DECL (c);
		  if (error_operand_p (decl))
		    {
		      decl = NULL_TREE;
		      break;
		    }
		  flags = GOVD_SEEN;
		  if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
		    flags |= GOVD_FIRSTPRIVATE;
		  if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
		    flags |= GOVD_LASTPRIVATE;
		  if (octx
		      && octx->region_type == ORT_WORKSHARE
		      && octx->combined_loop)
		    {
		      if (octx->outer_context
			  && (octx->outer_context->region_type
			      == ORT_COMBINED_PARALLEL))
			octx = octx->outer_context;
		      else if (omp_check_private (octx, decl, false))
			break;
		    }
		  else if (octx
			   && (octx->region_type & ORT_TASK) != 0
			   && octx->combined_loop)
		    ;
		  else if (octx
			   && octx->region_type == ORT_COMBINED_PARALLEL
			   && ctx->region_type == ORT_WORKSHARE
			   && octx == outer_ctx)
		    flags = GOVD_SEEN | GOVD_SHARED;
		  else if (octx
			   && octx->region_type == ORT_COMBINED_TEAMS)
		    flags = GOVD_SEEN | GOVD_SHARED;
		  else if (octx
			   && octx->region_type == ORT_COMBINED_TARGET)
		    {
		      flags &= ~GOVD_LASTPRIVATE;
		      if (flags == GOVD_SEEN)
			break;
		    }
		  else
		    break;
		  splay_tree_node on
		    = splay_tree_lookup (octx->variables,
					 (splay_tree_key) decl);
		  if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
		    {
		      octx = NULL;
		      break;
		    }
		  omp_add_variable (octx, decl, flags);
		  if (octx->outer_context == NULL)
		    break;
		  octx = octx->outer_context;
		}
	      while (1);
	      if (octx
		  && decl
		  && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
		      || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
		omp_notice_variable (octx, decl, true);
	    }
	  flags = GOVD_LINEAR | GOVD_EXPLICIT;
	  if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
	      && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
	    {
	      notice_outer = false;
	      flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
	    }
	  goto do_add;

	case OMP_CLAUSE_MAP:
	  decl = OMP_CLAUSE_DECL (c);
	  if (error_operand_p (decl))
	    remove = true;
	  switch (code)
	    {
	    case OMP_TARGET:
	      break;
	    case OACC_DATA:
	      if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
		break;
	      /* FALLTHRU */
	    case OMP_TARGET_DATA:
	    case OMP_TARGET_ENTER_DATA:
	    case OMP_TARGET_EXIT_DATA:
	    case OACC_ENTER_DATA:
	    case OACC_EXIT_DATA:
	    case OACC_HOST_DATA:
	      if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		  || (OMP_CLAUSE_MAP_KIND (c)
		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		/* For target {,enter ,exit }data only the array slice is
		   mapped, but not the pointer to it.  */
		remove = true;
	      break;
	    default:
	      break;
	    }
	  if (remove)
	    break;
	  if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
	    {
	      struct gimplify_omp_ctx *octx;
	      for (octx = outer_ctx; octx; octx = octx->outer_context)
		{
		  if (octx->region_type != ORT_ACC_HOST_DATA)
		    break;
		  splay_tree_node n2
		    = splay_tree_lookup (octx->variables,
					 (splay_tree_key) decl);
		  if (n2)
		    error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
			      "declared in enclosing %<host_data%> region",
			      DECL_NAME (decl));
		}
	    }
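	  /* A map clause without an explicit size covers the whole
	     object; derive OMP_CLAUSE_SIZE from the decl or its type.  */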
	  if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
	    OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
				  : TYPE_SIZE_UNIT (TREE_TYPE (decl));
	  if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
			     NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
	    {
	      remove = true;
	      break;
	    }
	  else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		    || (OMP_CLAUSE_MAP_KIND (c)
			== GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		   && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
	    {
	      OMP_CLAUSE_SIZE (c)
		= get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
					   false);
	      omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
				GOVD_FIRSTPRIVATE | GOVD_SEEN);
	    }
	  if (!DECL_P (decl))
	    {
	      tree d = decl, *pd;
	      if (TREE_CODE (d) == ARRAY_REF)
		{
		  while (TREE_CODE (d) == ARRAY_REF)
		    d = TREE_OPERAND (d, 0);
		  if (TREE_CODE (d) == COMPONENT_REF
		      && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
		    decl = d;
		}
	      pd = &OMP_CLAUSE_DECL (c);
	      if (d == decl
		  && TREE_CODE (decl) == INDIRECT_REF
		  && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		  && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
		      == REFERENCE_TYPE))
		{
		  pd = &TREE_OPERAND (decl, 0);
		  decl = TREE_OPERAND (decl, 0);
		}
	      if (TREE_CODE (decl) == COMPONENT_REF)
		{
		  while (TREE_CODE (decl) == COMPONENT_REF)
		    decl = TREE_OPERAND (decl, 0);
		  if (TREE_CODE (decl) == INDIRECT_REF
		      && DECL_P (TREE_OPERAND (decl, 0))
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE))
		    decl = TREE_OPERAND (decl, 0);
		}
	      if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue, fb_lvalue)
		  == GS_ERROR)
		{
		  remove = true;
		  break;
		}
	      if (DECL_P (decl))
		{
		  if (error_operand_p (decl))
		    {
		      remove = true;
		      break;
		    }

		  tree stype = TREE_TYPE (decl);
		  if (TREE_CODE (stype) == REFERENCE_TYPE)
		    stype = TREE_TYPE (stype);
		  if (TYPE_SIZE_UNIT (stype) == NULL
		      || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
		    {
		      error_at (OMP_CLAUSE_LOCATION (c),
				"mapping field %qE of variable length "
				"structure", OMP_CLAUSE_DECL (c));
		      remove = true;
		      break;
		    }
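		  /* A GOMP_MAP_ALWAYS_POINTER clause is only meaningful
		     directly behind the map of the object it points into
		     (possibly separated by one FIRSTPRIVATE_REFERENCE map);
		     malformed chains are dropped here.  */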
		  if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
		    {
		      /* Error recovery.  */
		      if (prev_list_p == NULL)
			{
			  remove = true;
			  break;
			}
		      if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
			{
			  tree ch = OMP_CLAUSE_CHAIN (*prev_list_p);
			  if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c)
			    {
			      remove = true;
			      break;
			    }
			}
		    }

		  tree offset;
		  HOST_WIDE_INT bitsize, bitpos;
		  machine_mode mode;
		  int unsignedp, reversep, volatilep = 0;
		  tree base = OMP_CLAUSE_DECL (c);
		  while (TREE_CODE (base) == ARRAY_REF)
		    base = TREE_OPERAND (base, 0);
		  if (TREE_CODE (base) == INDIRECT_REF)
		    base = TREE_OPERAND (base, 0);
		  base = get_inner_reference (base, &bitsize, &bitpos, &offset,
					      &mode, &unsignedp, &reversep,
					      &volatilep);
		  tree orig_base = base;
		  if ((TREE_CODE (base) == INDIRECT_REF
		       || (TREE_CODE (base) == MEM_REF
			   && integer_zerop (TREE_OPERAND (base, 1))))
		      && DECL_P (TREE_OPERAND (base, 0))
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
			  == REFERENCE_TYPE))
		    base = TREE_OPERAND (base, 0);
		  gcc_assert (base == decl
			      && (offset == NULL_TREE
				  || TREE_CODE (offset) == INTEGER_CST));

		  splay_tree_node n
		    = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
		  bool ptr = (OMP_CLAUSE_MAP_KIND (c)
			      == GOMP_MAP_ALWAYS_POINTER);
		  if (n == NULL || (n->value & GOVD_MAP) == 0)
		    {
		      tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c),
						 OMP_CLAUSE_MAP);
		      OMP_CLAUSE_SET_MAP_KIND (l, GOMP_MAP_STRUCT);
		      if (orig_base != base)
			OMP_CLAUSE_DECL (l) = unshare_expr (orig_base);
		      else
			OMP_CLAUSE_DECL (l) = decl;
		      OMP_CLAUSE_SIZE (l) = size_int (1);
		      if (struct_map_to_clause == NULL)
			struct_map_to_clause = new hash_map<tree, tree>;
		      struct_map_to_clause->put (decl, l);
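		      /* The pointer itself also needs mapping: allocate it
			 on entry (or release it for target exit data)
			 right behind the GOMP_MAP_STRUCT clause built
			 above.  */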
		      if (ptr)
			{
			  enum gomp_map_kind mkind
			    = code == OMP_TARGET_EXIT_DATA
			      ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
			  tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
						      OMP_CLAUSE_MAP);
			  OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
			  OMP_CLAUSE_DECL (c2)
			    = unshare_expr (OMP_CLAUSE_DECL (c));
			  OMP_CLAUSE_CHAIN (c2) = *prev_list_p;
			  OMP_CLAUSE_SIZE (c2)
			    = TYPE_SIZE_UNIT (ptr_type_node);
			  OMP_CLAUSE_CHAIN (l) = c2;
			  if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
			    {
			      tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
			      tree c3
				= build_omp_clause (OMP_CLAUSE_LOCATION (c),
						    OMP_CLAUSE_MAP);
			      OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
			      OMP_CLAUSE_DECL (c3)
				= unshare_expr (OMP_CLAUSE_DECL (c4));
			      OMP_CLAUSE_SIZE (c3)
				= TYPE_SIZE_UNIT (ptr_type_node);
			      OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
			      OMP_CLAUSE_CHAIN (c2) = c3;
			    }
			  *prev_list_p = l;
			  prev_list_p = NULL;
			}
		      else
			{
			  OMP_CLAUSE_CHAIN (l) = c;
			  *list_p = l;
			  list_p = &OMP_CLAUSE_CHAIN (l);
			}
		      if (orig_base != base && code == OMP_TARGET)
			{
			  tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
						      OMP_CLAUSE_MAP);
			  enum gomp_map_kind mkind
			    = GOMP_MAP_FIRSTPRIVATE_REFERENCE;
			  OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
			  OMP_CLAUSE_DECL (c2) = decl;
			  OMP_CLAUSE_SIZE (c2) = size_zero_node;
			  OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
			  OMP_CLAUSE_CHAIN (l) = c2;
			}
		      flags = GOVD_MAP | GOVD_EXPLICIT;
		      if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
			flags |= GOVD_SEEN;
		      goto do_add_decl;
		    }
		  else
		    {
		      tree *osc = struct_map_to_clause->get (decl);
		      tree *sc = NULL, *scp = NULL;
		      if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
			n->value |= GOVD_SEEN;
		      offset_int o1, o2;
		      if (offset)
			o1 = wi::to_offset (offset);
		      else
			o1 = 0;
		      if (bitpos)
			o1 = o1 + bitpos / BITS_PER_UNIT;
		      sc = &OMP_CLAUSE_CHAIN (*osc);
		      if (*sc != c
			  && (OMP_CLAUSE_MAP_KIND (*sc)
			      == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
			sc = &OMP_CLAUSE_CHAIN (*sc);
		      for (; *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc))
			if (ptr && sc == prev_list_p)
			  break;
			else if (TREE_CODE (OMP_CLAUSE_DECL (*sc))
				 != COMPONENT_REF
				 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
				     != INDIRECT_REF)
				 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
				     != ARRAY_REF))
			  break;
			else
			  {
			    tree offset2;
			    HOST_WIDE_INT bitsize2, bitpos2;
			    base = OMP_CLAUSE_DECL (*sc);
			    if (TREE_CODE (base) == ARRAY_REF)
			      {
				while (TREE_CODE (base) == ARRAY_REF)
				  base = TREE_OPERAND (base, 0);
				if (TREE_CODE (base) != COMPONENT_REF
				    || (TREE_CODE (TREE_TYPE (base))
					!= ARRAY_TYPE))
				  break;
			      }
			    else if (TREE_CODE (base) == INDIRECT_REF
				     && (TREE_CODE (TREE_OPERAND (base, 0))
					 == COMPONENT_REF)
				     && (TREE_CODE (TREE_TYPE
						    (TREE_OPERAND (base, 0)))
					 == REFERENCE_TYPE))
			      base = TREE_OPERAND (base, 0);
			    base = get_inner_reference (base, &bitsize2,
							&bitpos2, &offset2,
							&mode, &unsignedp,
							&reversep, &volatilep);
			    if ((TREE_CODE (base) == INDIRECT_REF
				 || (TREE_CODE (base) == MEM_REF
				     && integer_zerop (TREE_OPERAND (base,
								     1))))
				&& DECL_P (TREE_OPERAND (base, 0))
				&& (TREE_CODE (TREE_TYPE (TREE_OPERAND (base,
									0)))
				    == REFERENCE_TYPE))
			      base = TREE_OPERAND (base, 0);
			    if (base != decl)
			      break;
			    if (scp)
			      continue;
			    gcc_assert (offset == NULL_TREE
					|| TREE_CODE (offset) == INTEGER_CST);
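			    /* Two clauses map the same component if their
			       access paths strip down to the same chain of
			       COMPONENT_REFs; that case is diagnosed as a
			       duplicate below.  */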
			    tree d1 = OMP_CLAUSE_DECL (*sc);
			    tree d2 = OMP_CLAUSE_DECL (c);
			    while (TREE_CODE (d1) == ARRAY_REF)
			      d1 = TREE_OPERAND (d1, 0);
			    while (TREE_CODE (d2) == ARRAY_REF)
			      d2 = TREE_OPERAND (d2, 0);
			    if (TREE_CODE (d1) == INDIRECT_REF)
			      d1 = TREE_OPERAND (d1, 0);
			    if (TREE_CODE (d2) == INDIRECT_REF)
			      d2 = TREE_OPERAND (d2, 0);
			    while (TREE_CODE (d1) == COMPONENT_REF)
			      if (TREE_CODE (d2) == COMPONENT_REF
				  && TREE_OPERAND (d1, 1)
				     == TREE_OPERAND (d2, 1))
				{
				  d1 = TREE_OPERAND (d1, 0);
				  d2 = TREE_OPERAND (d2, 0);
				}
			      else
				break;
			    if (d1 == d2)
			      {
				error_at (OMP_CLAUSE_LOCATION (c),
					  "%qE appears more than once in map "
					  "clauses", OMP_CLAUSE_DECL (c));
				remove = true;
				break;
			      }
			    if (offset2)
			      o2 = wi::to_offset (offset2);
			    else
			      o2 = 0;
			    if (bitpos2)
			      o2 = o2 + bitpos2 / BITS_PER_UNIT;
			    if (wi::ltu_p (o1, o2)
				|| (wi::eq_p (o1, o2) && bitpos < bitpos2))
			      {
				if (ptr)
				  scp = sc;
				else
				  break;
			      }
			  }
		      if (remove)
			break;
		      OMP_CLAUSE_SIZE (*osc)
			= size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc),
				      size_one_node);
		      if (ptr)
			{
			  tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
						      OMP_CLAUSE_MAP);
			  tree cl = NULL_TREE;
			  enum gomp_map_kind mkind
			    = code == OMP_TARGET_EXIT_DATA
			      ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
			  OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
			  OMP_CLAUSE_DECL (c2)
			    = unshare_expr (OMP_CLAUSE_DECL (c));
			  OMP_CLAUSE_CHAIN (c2) = scp ? *scp : *prev_list_p;
			  OMP_CLAUSE_SIZE (c2)
			    = TYPE_SIZE_UNIT (ptr_type_node);
			  cl = scp ? *prev_list_p : c2;
			  if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
			    {
			      tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
			      tree c3
				= build_omp_clause (OMP_CLAUSE_LOCATION (c),
						    OMP_CLAUSE_MAP);
			      OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
			      OMP_CLAUSE_DECL (c3)
				= unshare_expr (OMP_CLAUSE_DECL (c4));
			      OMP_CLAUSE_SIZE (c3)
				= TYPE_SIZE_UNIT (ptr_type_node);
			      OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
			      if (!scp)
				OMP_CLAUSE_CHAIN (c2) = c3;
			      else
				cl = c3;
			    }
			  if (scp)
			    *scp = c2;
			  if (sc == prev_list_p)
			    {
			      *sc = cl;
			      prev_list_p = NULL;
			    }
			  else
			    {
			      *prev_list_p = OMP_CLAUSE_CHAIN (c);
			      list_p = prev_list_p;
			      prev_list_p = NULL;
			      OMP_CLAUSE_CHAIN (c) = *sc;
			      *sc = cl;
			      continue;
			    }
			}
		      else if (*sc != c)
			{
			  *list_p = OMP_CLAUSE_CHAIN (c);
			  OMP_CLAUSE_CHAIN (c) = *sc;
			  *sc = c;
			  continue;
			}
		    }
		}
	      if (!remove
		  && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
		  && OMP_CLAUSE_CHAIN (c)
		  && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
		  && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
		      == GOMP_MAP_ALWAYS_POINTER))
		prev_list_p = list_p;
	      break;
	    }
	  flags = GOVD_MAP | GOVD_EXPLICIT;
	  if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
	      || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
	    flags |= GOVD_MAP_ALWAYS_TO;
	  goto do_add;

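	/* For depend(sink:...), the dependence vector elements may carry
	   offset expressions in their TREE_PURPOSE; any decl operands
	   embedded in those still need gimplifying.  */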
	case OMP_CLAUSE_DEPEND:
	  if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
	    {
	      tree deps = OMP_CLAUSE_DECL (c);
	      while (deps && TREE_CODE (deps) == TREE_LIST)
		{
		  if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
		      && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
		    gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
				   pre_p, NULL, is_gimple_val, fb_rvalue);
		  deps = TREE_CHAIN (deps);
		}
	      break;
	    }
	  else if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
	    break;
	  if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
	    {
	      gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
			     NULL, is_gimple_val, fb_rvalue);
	      OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
	    }
	  if (error_operand_p (OMP_CLAUSE_DECL (c)))
	    {
	      remove = true;
	      break;
	    }
	  OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
	  if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
			     is_gimple_val, fb_rvalue) == GS_ERROR)
	    {
	      remove = true;
	      break;
	    }
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE__CACHE_:
	  decl = OMP_CLAUSE_DECL (c);
	  if (error_operand_p (decl))
	    {
	      remove = true;
	      break;
	    }
	  if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
	    OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
				  : TYPE_SIZE_UNIT (TREE_TYPE (decl));
	  if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
			     NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
	    {
	      remove = true;
	      break;
	    }
	  if (!DECL_P (decl))
	    {
	      if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
				 NULL, is_gimple_lvalue, fb_lvalue)
		  == GS_ERROR)
		{
		  remove = true;
		  break;
		}
	      break;
	    }
	  goto do_notice;

	case OMP_CLAUSE_USE_DEVICE_PTR:
	  flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
	  goto do_add;
	case OMP_CLAUSE_IS_DEVICE_PTR:
	  flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
	  goto do_add;

	do_add:
	  decl = OMP_CLAUSE_DECL (c);
	do_add_decl:
	  if (error_operand_p (decl))
	    {
	      remove = true;
	      break;
	    }
	  if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
	    {
	      tree t = omp_member_access_dummy_var (decl);
	      if (t)
		{
		  tree v = DECL_VALUE_EXPR (decl);
		  DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
		  if (outer_ctx)
		    omp_notice_variable (outer_ctx, t, true);
		}
	    }
	  if (code == OACC_DATA
	      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
	    flags |= GOVD_MAP_0LEN_ARRAY;
	  omp_add_variable (ctx, decl, flags);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
				GOVD_LOCAL | GOVD_SEEN);
	      if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
		  && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
				find_decl_expr,
				OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
				NULL) == NULL_TREE)
		omp_add_variable (ctx,
				  OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
				  GOVD_LOCAL | GOVD_SEEN);
	      gimplify_omp_ctxp = ctx;
	      push_gimplify_context ();

	      OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;

	      gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
				&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
	      pop_gimplify_context
		(gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
	      push_gimplify_context ();
	      gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
				&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	      pop_gimplify_context
		(gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
	      OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
	      OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;

	      gimplify_omp_ctxp = outer_ctx;
	    }
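	  /* OMP_CLAUSE_LASTPRIVATE_STMT and OMP_CLAUSE_LINEAR_STMT get the
	     analogous treatment below: wrap the statement into a BIND_EXPR
	     if needed and pre-gimplify it into the clause's GIMPLE
	     sequence.  */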
	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		   && OMP_CLAUSE_LASTPRIVATE_STMT (c))
	    {
	      gimplify_omp_ctxp = ctx;
	      push_gimplify_context ();
	      if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
		{
		  tree bind = build3 (BIND_EXPR, void_type_node, NULL,
				      NULL, NULL);
		  TREE_SIDE_EFFECTS (bind) = 1;
		  BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
		  OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
		}
	      gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
				&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
	      pop_gimplify_context
		(gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
	      OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;

	      gimplify_omp_ctxp = outer_ctx;
	    }
	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		   && OMP_CLAUSE_LINEAR_STMT (c))
	    {
	      gimplify_omp_ctxp = ctx;
	      push_gimplify_context ();
	      if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
		{
		  tree bind = build3 (BIND_EXPR, void_type_node, NULL,
				      NULL, NULL);
		  TREE_SIDE_EFFECTS (bind) = 1;
		  BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
		  OMP_CLAUSE_LINEAR_STMT (c) = bind;
		}
	      gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
				&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
	      pop_gimplify_context
		(gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
	      OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;

	      gimplify_omp_ctxp = outer_ctx;
	    }
	  if (notice_outer)
	    goto do_notice;
	  break;

	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (error_operand_p (decl))
	    {
	      remove = true;
	      break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
	      && !remove
	      && !omp_check_private (ctx, decl, true))
	    {
	      remove = true;
	      if (is_global_var (decl))
		{
		  if (DECL_THREAD_LOCAL_P (decl))
		    remove = false;
		  else if (DECL_HAS_VALUE_EXPR_P (decl))
		    {
		      tree value = get_base_address (DECL_VALUE_EXPR (decl));

		      if (value
			  && DECL_P (value)
			  && DECL_THREAD_LOCAL_P (value))
			remove = false;
		    }
		}
	      if (remove)
		error_at (OMP_CLAUSE_LOCATION (c),
			  "copyprivate variable %qE is not threadprivate"
			  " or private in outer context", DECL_NAME (decl));
	    }
	do_notice:
	  if (outer_ctx)
	    omp_notice_variable (outer_ctx, decl, true);
	  if (check_non_private
	      && region_type == ORT_WORKSHARE
	      && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
		  || decl == OMP_CLAUSE_DECL (c)
		  || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
		      && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
			  == ADDR_EXPR
			  || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
			      == POINTER_PLUS_EXPR
			      && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
					       (OMP_CLAUSE_DECL (c), 0), 0))
				  == ADDR_EXPR)))))
	      && omp_check_private (ctx, decl, false))
	    {
	      error ("%s variable %qE is private in outer context",
		     check_non_private, DECL_NAME (decl));
	      remove = true;
	    }
	  break;

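	/* An 'if' clause with a directive-name modifier is only valid on
	   the construct that modifier names; mismatches are diagnosed
	   below.  */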
	case OMP_CLAUSE_IF:
	  if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
	      && OMP_CLAUSE_IF_MODIFIER (c) != code)
	    {
	      const char *p[2];
	      for (int i = 0; i < 2; i++)
		switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
		  {
		  case OMP_PARALLEL: p[i] = "parallel"; break;
		  case OMP_TASK: p[i] = "task"; break;
		  case OMP_TASKLOOP: p[i] = "taskloop"; break;
		  case OMP_TARGET_DATA: p[i] = "target data"; break;
		  case OMP_TARGET: p[i] = "target"; break;
		  case OMP_TARGET_UPDATE: p[i] = "target update"; break;
		  case OMP_TARGET_ENTER_DATA:
		    p[i] = "target enter data"; break;
		  case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
		  default: gcc_unreachable ();
		  }
	      error_at (OMP_CLAUSE_LOCATION (c),
			"expected %qs %<if%> clause modifier rather than %qs",
			p[0], p[1]);
	      remove = true;
	    }
	  /* Fall through.  */

	case OMP_CLAUSE_FINAL:
	  OMP_CLAUSE_OPERAND (c, 0)
	    = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
	  /* Fall through.  */

	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_HINT:
	case OMP_CLAUSE__CILK_FOR_COUNT_:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	  if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
			     is_gimple_val, fb_rvalue) == GS_ERROR)
	    remove = true;
	  break;

	case OMP_CLAUSE_GANG:
	  if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
			     is_gimple_val, fb_rvalue) == GS_ERROR)
	    remove = true;
	  if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
			     is_gimple_val, fb_rvalue) == GS_ERROR)
	    remove = true;
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	  break;

	case OMP_CLAUSE_DEFAULTMAP:
	  ctx->target_map_scalars_firstprivate = false;
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (error_operand_p (decl))
	    {
	      remove = true;
	      break;
	    }
	  if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
			     is_gimple_val, fb_rvalue) == GS_ERROR)
	    {
	      remove = true;
	      break;
	    }
	  if (!is_global_var (decl)
	      && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
	    omp_add_variable (ctx, decl, GOVD_ALIGNED);
	  break;

	case OMP_CLAUSE_DEFAULT:
	  ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
	  break;

	default:
	  gcc_unreachable ();
	}

      if (code == OACC_DATA
	  && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	  && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
	remove = true;
      if (remove)
	*list_p = OMP_CLAUSE_CHAIN (c);
      else
	list_p = &OMP_CLAUSE_CHAIN (c);
    }

  gimplify_omp_ctxp = ctx;
  if (struct_map_to_clause)
    delete struct_map_to_clause;
}
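/* For example, given

     #pragma omp parallel shared (n)

   where N is only read inside the region, the clause can be marked
   OMP_CLAUSE_SHARED_READONLY and later treated much like firstprivate;
   the helpers below compute the GOVD_WRITTEN information that this
   decision relies on.  */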
/* Return true if DECL is a candidate for shared to firstprivate
   optimization.  We only consider non-addressable scalars that are
   not too big and are not privatized by reference.  */

static bool
omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
{
  if (TREE_ADDRESSABLE (decl))
    return false;
  tree type = TREE_TYPE (decl);
  if (!is_gimple_reg_type (type)
      || TREE_CODE (type) == REFERENCE_TYPE
      || TREE_ADDRESSABLE (type))
    return false;
  /* Don't optimize too large decls, as each thread/task will have
     its own.  */
  HOST_WIDE_INT len = int_size_in_bytes (type);
  if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
    return false;
  if (lang_hooks.decls.omp_privatize_by_reference (decl))
    return false;
  return true;
}

/* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
   For a DECL satisfying omp_shared_to_firstprivate_optimizable_decl_p,
   mark it as GOVD_WRITTEN in the closest outer context in which it is
   GOVD_SHARED.  */

static void
omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
{
  for (; ctx; ctx = ctx->outer_context)
    {
      splay_tree_node n = splay_tree_lookup (ctx->variables,
					     (splay_tree_key) decl);
      if (n == NULL)
	continue;
      else if (n->value & GOVD_SHARED)
	{
	  n->value |= GOVD_WRITTEN;
	  return;
	}
      else if (n->value & GOVD_DATA_SHARE_CLASS)
	return;
    }
}

/* Helper callback for walk_gimple_seq to discover possible stores
   to omp_shared_to_firstprivate_optimizable_decl_p decls and set
   GOVD_WRITTEN if they are GOVD_SHARED in some outer context
   for those.  */

static tree
omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;

  *walk_subtrees = 0;
  if (!wi->is_lhs)
    return NULL_TREE;

  tree op = *tp;
  do
    {
      if (handled_component_p (op))
	op = TREE_OPERAND (op, 0);
      else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
	       && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
	op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
      else
	break;
    }
  while (1);
  if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
    return NULL_TREE;

  omp_mark_stores (gimplify_omp_ctxp, op);
  return NULL_TREE;
}

/* Statement callback for the same walk_gimple_seq traversal; operands
   are handled by omp_find_stores_op above.  */

static tree
omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
		      bool *handled_ops_p,
		      struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  switch (gimple_code (stmt))
    {
    /* Don't recurse on OpenMP constructs for which
       gimplify_adjust_omp_clauses already handled the bodies,
       except handle gimple_omp_for_pre_body.  */
    case GIMPLE_OMP_FOR:
      *handled_ops_p = true;
      if (gimple_omp_for_pre_body (stmt))
	walk_gimple_seq (gimple_omp_for_pre_body (stmt),
			 omp_find_stores_stmt, omp_find_stores_op, wi);
      break;
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_CRITICAL:
      *handled_ops_p = true;
      break;
    default:
      break;
    }
  return NULL_TREE;
}

struct gimplify_adjust_omp_clauses_data
{
  tree *list_p;
  gimple_seq *pre_p;
};

/* Emit an implicit clause for each variable that was referenced in the
   context but has no explicit data-sharing or map clause; variables
   that were not actually used get nothing.  */

static int
gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
{
  tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
  gimple_seq *pre_p
    = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
  tree decl = (tree) n->key;
  unsigned flags = n->value;
  enum omp_clause_code code;
  tree clause;
  bool private_debug;

  if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
    return 0;
  if ((flags & GOVD_SEEN) == 0)
    return 0;
  if (flags & GOVD_DEBUG_PRIVATE)
    {
      gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
      private_debug = true;
    }
  else if (flags & GOVD_MAP)
    private_debug = false;
  else
    private_debug
      = lang_hooks.decls.omp_private_debug_clause (decl,
						   !!(flags & GOVD_SHARED));
  if (private_debug)
    code = OMP_CLAUSE_PRIVATE;
  else if (flags & GOVD_MAP)
    {
      code = OMP_CLAUSE_MAP;
      if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
	  && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
	{
	  error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
	  return 0;
	}
    }
  else if (flags & GOVD_SHARED)
    {
      if (is_global_var (decl))
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
	  while (ctx != NULL)
	    {
	      splay_tree_node on
		= splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
				      | GOVD_PRIVATE | GOVD_REDUCTION
				      | GOVD_LINEAR | GOVD_MAP)) != 0)
		break;
	      ctx = ctx->outer_context;
	    }
	  if (ctx == NULL)
	    return 0;
	}
      code = OMP_CLAUSE_SHARED;
    }
  else if (flags & GOVD_PRIVATE)
    code = OMP_CLAUSE_PRIVATE;
  else if (flags & GOVD_FIRSTPRIVATE)
    {
      code = OMP_CLAUSE_FIRSTPRIVATE;
      if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
	  && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
	  && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
	{
	  error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
		 "%<target%> construct", decl);
	  return 0;
	}
    }
  else if (flags & GOVD_LASTPRIVATE)
    code = OMP_CLAUSE_LASTPRIVATE;
  else if (flags & GOVD_ALIGNED)
    return 0;
  else
    gcc_unreachable ();

  if (((flags & GOVD_LASTPRIVATE)
       || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
      && omp_shared_to_firstprivate_optimizable_decl_p (decl))
    omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);

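  /* Build the implicit clause determined above and queue it at the head
     of *LIST_P; several of the cases below chain additional helper
     clauses behind it.  */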
  tree chain = *list_p;
  clause = build_omp_clause (input_location, code);
  OMP_CLAUSE_DECL (clause) = decl;
  OMP_CLAUSE_CHAIN (clause) = chain;
  if (private_debug)
    OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
  else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
    OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
  else if (code == OMP_CLAUSE_SHARED
	   && (flags & GOVD_WRITTEN) == 0
	   && omp_shared_to_firstprivate_optimizable_decl_p (decl))
    OMP_CLAUSE_SHARED_READONLY (clause) = 1;
  else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
    OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
  else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
    {
      tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
      OMP_CLAUSE_DECL (nc) = decl;
      if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
	OMP_CLAUSE_DECL (clause)
	  = build_simple_mem_ref_loc (input_location, decl);
      OMP_CLAUSE_DECL (clause)
	= build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
		  build_int_cst (build_pointer_type (char_type_node), 0));
      OMP_CLAUSE_SIZE (clause) = size_zero_node;
      OMP_CLAUSE_SIZE (nc) = size_zero_node;
      OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
      OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
      OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
      OMP_CLAUSE_CHAIN (nc) = chain;
      OMP_CLAUSE_CHAIN (clause) = nc;
      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
      gimplify_omp_ctxp = ctx->outer_context;
      gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
		     pre_p, NULL, is_gimple_val, fb_rvalue);
      gimplify_omp_ctxp = ctx;
    }
  else if (code == OMP_CLAUSE_MAP)
    {
      int kind = (flags & GOVD_MAP_TO_ONLY
		  ? GOMP_MAP_TO
		  : GOMP_MAP_TOFROM);
      if (flags & GOVD_MAP_FORCE)
	kind |= GOMP_MAP_FLAG_FORCE;
      OMP_CLAUSE_SET_MAP_KIND (clause, kind);
      if (DECL_SIZE (decl)
	  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	{
	  tree decl2 = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	  decl2 = TREE_OPERAND (decl2, 0);
	  gcc_assert (DECL_P (decl2));
	  tree mem = build_simple_mem_ref (decl2);
	  OMP_CLAUSE_DECL (clause) = mem;
	  OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	  if (gimplify_omp_ctxp->outer_context)
	    {
	      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
	      omp_notice_variable (ctx, decl2, true);
	      omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
	    }
	  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
				      OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (nc) = decl;
	  OMP_CLAUSE_SIZE (nc) = size_zero_node;
	  if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
	    OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
	  else
	    OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
	  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
	  OMP_CLAUSE_CHAIN (clause) = nc;
	}
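      /* When the target context firstprivatizes array bases and DECL is
	 privatized by reference, map the referenced data and turn the
	 reference itself into GOMP_MAP_FIRSTPRIVATE_REFERENCE.  */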
      else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
	       && lang_hooks.decls.omp_privatize_by_reference (decl))
	{
	  OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
	  OMP_CLAUSE_SIZE (clause)
	    = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  gimplify_omp_ctxp = ctx->outer_context;
	  gimplify_expr (&OMP_CLAUSE_SIZE (clause),
			 pre_p, NULL, is_gimple_val, fb_rvalue);
	  gimplify_omp_ctxp = ctx;
	  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
				      OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (nc) = decl;
	  OMP_CLAUSE_SIZE (nc) = size_zero_node;
	  OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
	  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
	  OMP_CLAUSE_CHAIN (clause) = nc;
	}
      else
	OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
    }
  if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
    {
      tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
      OMP_CLAUSE_DECL (nc) = decl;
      OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
      OMP_CLAUSE_CHAIN (nc) = chain;
      OMP_CLAUSE_CHAIN (clause) = nc;
      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
      gimplify_omp_ctxp = ctx->outer_context;
      lang_hooks.decls.omp_finish_clause (nc, pre_p);
      gimplify_omp_ctxp = ctx;
    }
  *list_p = clause;
  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
  gimplify_omp_ctxp = ctx->outer_context;
  lang_hooks.decls.omp_finish_clause (clause, pre_p);
  if (gimplify_omp_ctxp)
    for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
      if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
	  && DECL_P (OMP_CLAUSE_SIZE (clause)))
	omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
			     true);
  gimplify_omp_ctxp = ctx;
  return 0;
}

static void
gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
			     enum tree_code code)
{
  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
  tree c, decl;

  if (body)
    {
      struct gimplify_omp_ctx *octx;
      for (octx = ctx; octx; octx = octx->outer_context)
	if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
	  break;
      if (octx)
	{
	  struct walk_stmt_info wi;
	  memset (&wi, 0, sizeof (wi));
	  walk_gimple_seq (body, omp_find_stores_stmt,
			   omp_find_stores_op, &wi);
	}
    }

  if (ctx->add_safelen1)
    {
      /* If there are VLAs in the body of a simd loop, prevent
	 vectorization.  */
      gcc_assert (ctx->region_type == ORT_SIMD);
      c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
      OMP_CLAUSE_SAFELEN_EXPR (c) = integer_one_node;
      OMP_CLAUSE_CHAIN (c) = *list_p;
      *list_p = c;
      list_p = &OMP_CLAUSE_CHAIN (c);
    }

  while ((c = *list_p) != NULL)
    {
      splay_tree_node n;
      bool remove = false;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_FIRSTPRIVATE:
	  if ((ctx->region_type & ORT_TARGET)
	      && (ctx->region_type & ORT_ACC) == 0
	      && TYPE_ATOMIC (strip_array_types
			      (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
	    {
	      error_at (OMP_CLAUSE_LOCATION (c),
			"%<_Atomic%> %qD in %<firstprivate%> clause on "
			"%<target%> construct", OMP_CLAUSE_DECL (c));
	      remove = true;
	      break;
	    }
	  /* FALLTHRU */
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  remove = !(n->value & GOVD_SEEN);
	  if (!remove)
	    {
	      bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
	      if ((n->value & GOVD_DEBUG_PRIVATE)
		  || lang_hooks.decls.omp_private_debug_clause (decl, shared))
		{
		  gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
			      || ((n->value & GOVD_DATA_SHARE_CLASS)
				  == GOVD_SHARED));
		  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
		  OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
		}
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		  && (n->value & GOVD_WRITTEN) == 0
		  && DECL_P (decl)
		  && omp_shared_to_firstprivate_optimizable_decl_p (decl))
		OMP_CLAUSE_SHARED_READONLY (c) = 1;
	      else if (DECL_P (decl)
		       && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
			    && (n->value & GOVD_WRITTEN) != 0)
			   || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
			       && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
		       && omp_shared_to_firstprivate_optimizable_decl_p (decl))
		omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
	     accurately reflect the presence of a FIRSTPRIVATE clause.  */
	  decl = OMP_CLAUSE_DECL (c);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
	    = (n->value & GOVD_FIRSTPRIVATE) != 0;
	  if (code == OMP_DISTRIBUTE
	      && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    {
	      remove = true;
	      error_at (OMP_CLAUSE_LOCATION (c),
			"same variable used in %<firstprivate%> and "
			"%<lastprivate%> clauses on %<distribute%> "
			"construct");
	    }
	  if (!remove
	      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && DECL_P (decl)
	      && omp_shared_to_firstprivate_optimizable_decl_p (decl))
	    omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (!is_global_var (decl))
	    {
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      remove = n == NULL || !(n->value & GOVD_SEEN);
	      if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
		{
		  struct gimplify_omp_ctx *octx;
		  if (n != NULL
		      && (n->value & (GOVD_DATA_SHARE_CLASS
				      & ~GOVD_FIRSTPRIVATE)))
		    remove = true;
		  else
		    for (octx = ctx->outer_context; octx;
			 octx = octx->outer_context)
		      {
			n = splay_tree_lookup (octx->variables,
					       (splay_tree_key) decl);
			if (n == NULL)
			  continue;
			if (n->value & GOVD_LOCAL)
			  break;
			/* We have to avoid assigning a shared variable
			   to itself when trying to add
			   __builtin_assume_aligned.  */
			if (n->value & GOVD_SHARED)
			  {
			    remove = true;
			    break;
			  }
		      }
		}
	    }
	  else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    {
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
		remove = true;
	    }
	  break;

	case OMP_CLAUSE_MAP:
	  if (code == OMP_TARGET_EXIT_DATA
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
	    {
	      remove = true;
	      break;
	    }
	  decl = OMP_CLAUSE_DECL (c);
	  /* Data clauses associated with acc parallel reductions must be
	     compatible with present_or_copy.  Warn and adjust the clause
	     if that is not the case.  */
	  if (ctx->region_type == ORT_ACC_PARALLEL)
	    {
	      tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
	      n = NULL;

	      if (DECL_P (t))
		n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);

	      if (n && (n->value & GOVD_REDUCTION))
		{
		  enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);

		  OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
		  if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
		      && kind != GOMP_MAP_FORCE_PRESENT
		      && kind != GOMP_MAP_POINTER)
		    {
		      warning_at (OMP_CLAUSE_LOCATION (c), 0,
				  "incompatible data clause with reduction "
				  "on %qE; promoting to present_or_copy",
				  DECL_NAME (t));
		      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
		    }
		}
	    }
	  if (!DECL_P (decl))
	    {
	      if ((ctx->region_type & ORT_TARGET) != 0
		  && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		{
		  if (TREE_CODE (decl) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE))
		    decl = TREE_OPERAND (decl, 0);
		  if (TREE_CODE (decl) == COMPONENT_REF)
		    {
		      while (TREE_CODE (decl) == COMPONENT_REF)
			decl = TREE_OPERAND (decl, 0);
		      if (DECL_P (decl))
			{
			  n = splay_tree_lookup (ctx->variables,
						 (splay_tree_key) decl);
			  if (!(n->value & GOVD_SEEN))
			    remove = true;
			}
		    }
		}
	      break;
	    }
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  if ((ctx->region_type & ORT_TARGET) != 0
	      && !(n->value & GOVD_SEEN)
	      && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
	      && (!is_global_var (decl)
		  || !lookup_attribute ("omp declare target link",
					DECL_ATTRIBUTES (decl))))
	    {
	      remove = true;
	      /* For struct element mapping, if struct is never referenced
		 in target block and none of the mapping has always modifier,
		 remove all the struct element mappings, which immediately
		 follow the GOMP_MAP_STRUCT map clause.  */
	      if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
		{
		  HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
		  while (cnt--)
		    OMP_CLAUSE_CHAIN (c)
		      = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
		}
	    }
	  else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
		   && code == OMP_TARGET_EXIT_DATA)
	    remove = true;
	  else if (DECL_SIZE (decl)
		   && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
	    {
	      /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
		 for these, TREE_CODE (DECL_SIZE (decl)) will always be
		 INTEGER_CST.  */
	      gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);

	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      tree mem = build_simple_mem_ref (decl2);
	      OMP_CLAUSE_DECL (c) = mem;
	      OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	      if (ctx->outer_context)
		{
		  omp_notice_variable (ctx->outer_context, decl2, true);
		  omp_notice_variable (ctx->outer_context,
				       OMP_CLAUSE_SIZE (c), true);
		}
	      if (((ctx->region_type & ORT_TARGET) != 0
		   || !ctx->target_firstprivatize_array_bases)
		  && ((n->value & GOVD_SEEN) == 0
		      || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
		{
		  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
					      OMP_CLAUSE_MAP);
		  OMP_CLAUSE_DECL (nc) = decl;
		  OMP_CLAUSE_SIZE (nc) = size_zero_node;
		  if (ctx->target_firstprivatize_array_bases)
		    OMP_CLAUSE_SET_MAP_KIND (nc,
					     GOMP_MAP_FIRSTPRIVATE_POINTER);
		  else
		    OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
		  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
		  OMP_CLAUSE_CHAIN (c) = nc;
		  c = nc;
		}
	    }
	  else
	    {
	      if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
		OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	      gcc_assert ((n->value & GOVD_SEEN) == 0
			  || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
			      == 0));
	    }
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE__CACHE_:
	  decl = OMP_CLAUSE_DECL (c);
	  if (!DECL_P (decl))
	    break;
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      tree mem = build_simple_mem_ref (decl2);
	      OMP_CLAUSE_DECL (c) = mem;
	      OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	      if (ctx->outer_context)
		{
		  omp_notice_variable (ctx->outer_context, decl2, true);
		  omp_notice_variable (ctx->outer_context,
				       OMP_CLAUSE_SIZE (c), true);
		}
	    }
	  else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
	    OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	  break;

*/ 9121 if (ctx->region_type == ORT_ACC_PARALLEL) 9122 { 9123 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl); 9124 if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) 9125 error_at (OMP_CLAUSE_LOCATION (c), "invalid private " 9126 "reduction on %qE", DECL_NAME (decl)); 9127 else if ((n->value & GOVD_MAP) == 0) 9128 { 9129 tree next = OMP_CLAUSE_CHAIN (c); 9130 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP); 9131 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM); 9132 OMP_CLAUSE_DECL (nc) = decl; 9133 OMP_CLAUSE_CHAIN (c) = nc; 9134 lang_hooks.decls.omp_finish_clause (nc, pre_p); 9135 while (1) 9136 { 9137 OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1; 9138 if (OMP_CLAUSE_CHAIN (nc) == NULL) 9139 break; 9140 nc = OMP_CLAUSE_CHAIN (nc); 9141 } 9142 OMP_CLAUSE_CHAIN (nc) = next; 9143 n->value |= GOVD_MAP; 9144 } 9145 } 9146 if (DECL_P (decl) 9147 && omp_shared_to_firstprivate_optimizable_decl_p (decl)) 9148 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl); 9149 break; 9150 case OMP_CLAUSE_COPYIN: 9151 case OMP_CLAUSE_COPYPRIVATE: 9152 case OMP_CLAUSE_IF: 9153 case OMP_CLAUSE_NUM_THREADS: 9154 case OMP_CLAUSE_NUM_TEAMS: 9155 case OMP_CLAUSE_THREAD_LIMIT: 9156 case OMP_CLAUSE_DIST_SCHEDULE: 9157 case OMP_CLAUSE_DEVICE: 9158 case OMP_CLAUSE_SCHEDULE: 9159 case OMP_CLAUSE_NOWAIT: 9160 case OMP_CLAUSE_ORDERED: 9161 case OMP_CLAUSE_DEFAULT: 9162 case OMP_CLAUSE_UNTIED: 9163 case OMP_CLAUSE_COLLAPSE: 9164 case OMP_CLAUSE_FINAL: 9165 case OMP_CLAUSE_MERGEABLE: 9166 case OMP_CLAUSE_PROC_BIND: 9167 case OMP_CLAUSE_SAFELEN: 9168 case OMP_CLAUSE_SIMDLEN: 9169 case OMP_CLAUSE_DEPEND: 9170 case OMP_CLAUSE_PRIORITY: 9171 case OMP_CLAUSE_GRAINSIZE: 9172 case OMP_CLAUSE_NUM_TASKS: 9173 case OMP_CLAUSE_NOGROUP: 9174 case OMP_CLAUSE_THREADS: 9175 case OMP_CLAUSE_SIMD: 9176 case OMP_CLAUSE_HINT: 9177 case OMP_CLAUSE_DEFAULTMAP: 9178 case OMP_CLAUSE_USE_DEVICE_PTR: 9179 case OMP_CLAUSE_IS_DEVICE_PTR: 9180 case OMP_CLAUSE__CILK_FOR_COUNT_: 9181 case OMP_CLAUSE_ASYNC: 9182 case OMP_CLAUSE_WAIT: 9183 case OMP_CLAUSE_INDEPENDENT: 9184 case OMP_CLAUSE_NUM_GANGS: 9185 case OMP_CLAUSE_NUM_WORKERS: 9186 case OMP_CLAUSE_VECTOR_LENGTH: 9187 case OMP_CLAUSE_GANG: 9188 case OMP_CLAUSE_WORKER: 9189 case OMP_CLAUSE_VECTOR: 9190 case OMP_CLAUSE_AUTO: 9191 case OMP_CLAUSE_SEQ: 9192 case OMP_CLAUSE_TILE: 9193 break; 9194 9195 default: 9196 gcc_unreachable (); 9197 } 9198 9199 if (remove) 9200 *list_p = OMP_CLAUSE_CHAIN (c); 9201 else 9202 list_p = &OMP_CLAUSE_CHAIN (c); 9203 } 9204 9205 /* Add in any implicit data sharing. */ 9206 struct gimplify_adjust_omp_clauses_data data; 9207 data.list_p = list_p; 9208 data.pre_p = pre_p; 9209 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data); 9210 9211 gimplify_omp_ctxp = ctx->outer_context; 9212 delete_omp_context (ctx); 9213 } 9214 9215 /* Gimplify OACC_CACHE. */ 9216 9217 static void 9218 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p) 9219 { 9220 tree expr = *expr_p; 9221 9222 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC, 9223 OACC_CACHE); 9224 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr), 9225 OACC_CACHE); 9226 9227 /* TODO: Do something sensible with this information. */ 9228 9229 *expr_p = NULL_TREE; 9230 } 9231 9232 /* Helper function of gimplify_oacc_declare. The helper's purpose is to, 9233 if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit' 9234 kind. 
The entry kind will replace the one in CLAUSE, while the exit 9235 kind will be used in a new omp_clause and returned to the caller. */ 9236 9237 static tree 9238 gimplify_oacc_declare_1 (tree clause) 9239 { 9240 HOST_WIDE_INT kind, new_op; 9241 bool ret = false; 9242 tree c = NULL; 9243 9244 kind = OMP_CLAUSE_MAP_KIND (clause); 9245 9246 switch (kind) 9247 { 9248 case GOMP_MAP_ALLOC: 9249 case GOMP_MAP_FORCE_ALLOC: 9250 case GOMP_MAP_FORCE_TO: 9251 new_op = GOMP_MAP_DELETE; 9252 ret = true; 9253 break; 9254 9255 case GOMP_MAP_FORCE_FROM: 9256 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC); 9257 new_op = GOMP_MAP_FORCE_FROM; 9258 ret = true; 9259 break; 9260 9261 case GOMP_MAP_FORCE_TOFROM: 9262 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_TO); 9263 new_op = GOMP_MAP_FORCE_FROM; 9264 ret = true; 9265 break; 9266 9267 case GOMP_MAP_FROM: 9268 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC); 9269 new_op = GOMP_MAP_FROM; 9270 ret = true; 9271 break; 9272 9273 case GOMP_MAP_TOFROM: 9274 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO); 9275 new_op = GOMP_MAP_FROM; 9276 ret = true; 9277 break; 9278 9279 case GOMP_MAP_DEVICE_RESIDENT: 9280 case GOMP_MAP_FORCE_DEVICEPTR: 9281 case GOMP_MAP_FORCE_PRESENT: 9282 case GOMP_MAP_LINK: 9283 case GOMP_MAP_POINTER: 9284 case GOMP_MAP_TO: 9285 break; 9286 9287 default: 9288 gcc_unreachable (); 9289 break; 9290 } 9291 9292 if (ret) 9293 { 9294 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP); 9295 OMP_CLAUSE_SET_MAP_KIND (c, new_op); 9296 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause); 9297 } 9298 9299 return c; 9300 } 9301 9302 /* Gimplify OACC_DECLARE. */ 9303 9304 static void 9305 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p) 9306 { 9307 tree expr = *expr_p; 9308 gomp_target *stmt; 9309 tree clauses, t, decl; 9310 9311 clauses = OACC_DECLARE_CLAUSES (expr); 9312 9313 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE); 9314 gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE); 9315 9316 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t)) 9317 { 9318 decl = OMP_CLAUSE_DECL (t); 9319 9320 if (TREE_CODE (decl) == MEM_REF) 9321 decl = TREE_OPERAND (decl, 0); 9322 9323 if (VAR_P (decl) && !is_oacc_declared (decl)) 9324 { 9325 tree attr = get_identifier ("oacc declare target"); 9326 DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE, 9327 DECL_ATTRIBUTES (decl)); 9328 } 9329 9330 if (VAR_P (decl) 9331 && !is_global_var (decl) 9332 && DECL_CONTEXT (decl) == current_function_decl) 9333 { 9334 tree c = gimplify_oacc_declare_1 (t); 9335 if (c) 9336 { 9337 if (oacc_declare_returns == NULL) 9338 oacc_declare_returns = new hash_map<tree, tree>; 9339 9340 oacc_declare_returns->put (decl, c); 9341 } 9342 } 9343 9344 if (gimplify_omp_ctxp) 9345 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN); 9346 } 9347 9348 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE, 9349 clauses); 9350 9351 gimplify_seq_add_stmt (pre_p, stmt); 9352 9353 *expr_p = NULL_TREE; 9354 } 9355 9356 /* Gimplify the contents of an OMP_PARALLEL statement. This involves 9357 gimplification of the body, as well as scanning the body for used 9358 variables. We need to do this scan now, because variable-sized 9359 decls will be decomposed during gimplification. 
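As a rough sketch (illustrative only):

     #pragma omp parallel
       body;

   becomes a GIMPLE_OMP_PARALLEL statement whose body is the
   gimplified BODY, normally wrapped in a GIMPLE_BIND, with the
   clauses rescanned and adjusted around it.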
*/ 9360 9361 static void 9362 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p) 9363 { 9364 tree expr = *expr_p; 9365 gimple *g; 9366 gimple_seq body = NULL; 9367 9368 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p, 9369 OMP_PARALLEL_COMBINED (expr) 9370 ? ORT_COMBINED_PARALLEL 9371 : ORT_PARALLEL, OMP_PARALLEL); 9372 9373 push_gimplify_context (); 9374 9375 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body); 9376 if (gimple_code (g) == GIMPLE_BIND) 9377 pop_gimplify_context (g); 9378 else 9379 pop_gimplify_context (NULL); 9380 9381 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr), 9382 OMP_PARALLEL); 9383 9384 g = gimple_build_omp_parallel (body, 9385 OMP_PARALLEL_CLAUSES (expr), 9386 NULL_TREE, NULL_TREE); 9387 if (OMP_PARALLEL_COMBINED (expr)) 9388 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED); 9389 gimplify_seq_add_stmt (pre_p, g); 9390 *expr_p = NULL_TREE; 9391 } 9392 9393 /* Gimplify the contents of an OMP_TASK statement. This involves 9394 gimplification of the body, as well as scanning the body for used 9395 variables. We need to do this scan now, because variable-sized 9396 decls will be decomposed during gimplification. */ 9397 9398 static void 9399 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p) 9400 { 9401 tree expr = *expr_p; 9402 gimple *g; 9403 gimple_seq body = NULL; 9404 9405 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p, 9406 omp_find_clause (OMP_TASK_CLAUSES (expr), 9407 OMP_CLAUSE_UNTIED) 9408 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK); 9409 9410 push_gimplify_context (); 9411 9412 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body); 9413 if (gimple_code (g) == GIMPLE_BIND) 9414 pop_gimplify_context (g); 9415 else 9416 pop_gimplify_context (NULL); 9417 9418 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr), 9419 OMP_TASK); 9420 9421 g = gimple_build_omp_task (body, 9422 OMP_TASK_CLAUSES (expr), 9423 NULL_TREE, NULL_TREE, 9424 NULL_TREE, NULL_TREE, NULL_TREE); 9425 gimplify_seq_add_stmt (pre_p, g); 9426 *expr_p = NULL_TREE; 9427 } 9428 9429 /* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD 9430 with non-NULL OMP_FOR_INIT. */ 9431 9432 static tree 9433 find_combined_omp_for (tree *tp, int *walk_subtrees, void *) 9434 { 9435 *walk_subtrees = 0; 9436 switch (TREE_CODE (*tp)) 9437 { 9438 case OMP_FOR: 9439 *walk_subtrees = 1; 9440 /* FALLTHRU */ 9441 case OMP_SIMD: 9442 if (OMP_FOR_INIT (*tp) != NULL_TREE) 9443 return *tp; 9444 break; 9445 case BIND_EXPR: 9446 case STATEMENT_LIST: 9447 case OMP_PARALLEL: 9448 *walk_subtrees = 1; 9449 break; 9450 default: 9451 break; 9452 } 9453 return NULL_TREE; 9454 } 9455 9456 /* Gimplify the gross structure of an OMP_FOR statement. 
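The OMP_FOR_INIT, OMP_FOR_COND and OMP_FOR_INCR vectors carry one
   entry per associated loop; e.g. (a sketch only)

     #pragma omp for collapse (2)
     for (i = 0; i < n; i++)
       for (j = 0; j < m; j++)
	 ;

   has TREE_VEC_LENGTH == 2 for all three vectors, and each
   init/cond/incr triple is transcribed into the GIMPLE_OMP_FOR
   built below.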
*/ 9457 9458 static enum gimplify_status 9459 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p) 9460 { 9461 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t; 9462 enum gimplify_status ret = GS_ALL_DONE; 9463 enum gimplify_status tret; 9464 gomp_for *gfor; 9465 gimple_seq for_body, for_pre_body; 9466 int i; 9467 bitmap has_decl_expr = NULL; 9468 enum omp_region_type ort = ORT_WORKSHARE; 9469 9470 orig_for_stmt = for_stmt = *expr_p; 9471 9472 switch (TREE_CODE (for_stmt)) 9473 { 9474 case OMP_FOR: 9475 case CILK_FOR: 9476 case OMP_DISTRIBUTE: 9477 break; 9478 case OACC_LOOP: 9479 ort = ORT_ACC; 9480 break; 9481 case OMP_TASKLOOP: 9482 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED)) 9483 ort = ORT_UNTIED_TASK; 9484 else 9485 ort = ORT_TASK; 9486 break; 9487 case OMP_SIMD: 9488 case CILK_SIMD: 9489 ort = ORT_SIMD; 9490 break; 9491 default: 9492 gcc_unreachable (); 9493 } 9494 9495 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear 9496 clause for the IV. */ 9497 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1) 9498 { 9499 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0); 9500 gcc_assert (TREE_CODE (t) == MODIFY_EXPR); 9501 decl = TREE_OPERAND (t, 0); 9502 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c)) 9503 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR 9504 && OMP_CLAUSE_DECL (c) == decl) 9505 { 9506 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1; 9507 break; 9508 } 9509 } 9510 9511 if (OMP_FOR_INIT (for_stmt) == NULL_TREE) 9512 { 9513 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP); 9514 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt), 9515 find_combined_omp_for, NULL, NULL); 9516 if (inner_for_stmt == NULL_TREE) 9517 { 9518 gcc_assert (seen_error ()); 9519 *expr_p = NULL_TREE; 9520 return GS_ERROR; 9521 } 9522 } 9523 9524 if (TREE_CODE (for_stmt) != OMP_TASKLOOP) 9525 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort, 9526 TREE_CODE (for_stmt)); 9527 9528 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE) 9529 gimplify_omp_ctxp->distribute = true; 9530 9531 /* Handle OMP_FOR_INIT. 
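For simd, the DECL_UIDs of variables declared in the loop pre-body
   are first recorded in HAS_DECL_EXPR; the iteration-variable
   privatization code further down consults this bitmap when
   choosing between lastprivate and private (a summary of the code
   that follows, for orientation).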
*/ 9532 for_pre_body = NULL; 9533 if (ort == ORT_SIMD && OMP_FOR_PRE_BODY (for_stmt)) 9534 { 9535 has_decl_expr = BITMAP_ALLOC (NULL); 9536 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR 9537 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt))) 9538 == VAR_DECL) 9539 { 9540 t = OMP_FOR_PRE_BODY (for_stmt); 9541 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t))); 9542 } 9543 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST) 9544 { 9545 tree_stmt_iterator si; 9546 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si); 9547 tsi_next (&si)) 9548 { 9549 t = tsi_stmt (si); 9550 if (TREE_CODE (t) == DECL_EXPR 9551 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL) 9552 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t))); 9553 } 9554 } 9555 } 9556 if (OMP_FOR_PRE_BODY (for_stmt)) 9557 { 9558 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp) 9559 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body); 9560 else 9561 { 9562 struct gimplify_omp_ctx ctx; 9563 memset (&ctx, 0, sizeof (ctx)); 9564 ctx.region_type = ORT_NONE; 9565 gimplify_omp_ctxp = &ctx; 9566 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body); 9567 gimplify_omp_ctxp = NULL; 9568 } 9569 } 9570 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE; 9571 9572 if (OMP_FOR_INIT (for_stmt) == NULL_TREE) 9573 for_stmt = inner_for_stmt; 9574 9575 /* For taskloop, need to gimplify the start, end and step before the 9576 taskloop, outside of the taskloop omp context. */ 9577 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP) 9578 { 9579 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++) 9580 { 9581 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i); 9582 if (!is_gimple_constant (TREE_OPERAND (t, 1))) 9583 { 9584 tree type = TREE_TYPE (TREE_OPERAND (t, 0)); 9585 TREE_OPERAND (t, 1) 9586 = get_initialized_tmp_var (TREE_OPERAND (t, 1), 9587 gimple_seq_empty_p (for_pre_body) 9588 ? pre_p : &for_pre_body, NULL, 9589 false); 9590 /* Reference to pointer conversion is considered useless, 9591 but is significant for firstprivate clause. Force it 9592 here. */ 9593 if (TREE_CODE (type) == POINTER_TYPE 9594 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 1))) 9595 == REFERENCE_TYPE)) 9596 { 9597 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type)); 9598 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v, 9599 TREE_OPERAND (t, 1)); 9600 gimplify_and_add (m, gimple_seq_empty_p (for_pre_body) 9601 ? pre_p : &for_pre_body); 9602 TREE_OPERAND (t, 1) = v; 9603 } 9604 tree c = build_omp_clause (input_location, 9605 OMP_CLAUSE_FIRSTPRIVATE); 9606 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1); 9607 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt); 9608 OMP_FOR_CLAUSES (orig_for_stmt) = c; 9609 } 9610 9611 /* Handle OMP_FOR_COND. */ 9612 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i); 9613 if (!is_gimple_constant (TREE_OPERAND (t, 1))) 9614 { 9615 tree type = TREE_TYPE (TREE_OPERAND (t, 0)); 9616 TREE_OPERAND (t, 1) 9617 = get_initialized_tmp_var (TREE_OPERAND (t, 1), 9618 gimple_seq_empty_p (for_pre_body) 9619 ? pre_p : &for_pre_body, NULL, 9620 false); 9621 /* Reference to pointer conversion is considered useless, 9622 but is significant for firstprivate clause. Force it 9623 here. 
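E.g. (an illustrative sketch, names made up): for a pointer
   iteration variable whose end bound is a C++ 'char *&' reference,

     #pragma omp taskloop
     for (p = start; p < end; p++)
       ;

   the bound END is copied into a fresh 'char *' temporary via an
   INIT_EXPR, so the firstprivate clause built below captures a
   pointer value instead of having the conversion folded away.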
*/ 9624 if (TREE_CODE (type) == POINTER_TYPE 9625 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 1))) 9626 == REFERENCE_TYPE)) 9627 { 9628 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type)); 9629 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v, 9630 TREE_OPERAND (t, 1)); 9631 gimplify_and_add (m, gimple_seq_empty_p (for_pre_body) 9632 ? pre_p : &for_pre_body); 9633 TREE_OPERAND (t, 1) = v; 9634 } 9635 tree c = build_omp_clause (input_location, 9636 OMP_CLAUSE_FIRSTPRIVATE); 9637 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1); 9638 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt); 9639 OMP_FOR_CLAUSES (orig_for_stmt) = c; 9640 } 9641 9642 /* Handle OMP_FOR_INCR. */ 9643 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i); 9644 if (TREE_CODE (t) == MODIFY_EXPR) 9645 { 9646 decl = TREE_OPERAND (t, 0); 9647 t = TREE_OPERAND (t, 1); 9648 tree *tp = &TREE_OPERAND (t, 1); 9649 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl) 9650 tp = &TREE_OPERAND (t, 0); 9651 9652 if (!is_gimple_constant (*tp)) 9653 { 9654 gimple_seq *seq = gimple_seq_empty_p (for_pre_body) 9655 ? pre_p : &for_pre_body; 9656 *tp = get_initialized_tmp_var (*tp, seq, NULL, false); 9657 tree c = build_omp_clause (input_location, 9658 OMP_CLAUSE_FIRSTPRIVATE); 9659 OMP_CLAUSE_DECL (c) = *tp; 9660 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt); 9661 OMP_FOR_CLAUSES (orig_for_stmt) = c; 9662 } 9663 } 9664 } 9665 9666 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort, 9667 OMP_TASKLOOP); 9668 } 9669 9670 if (orig_for_stmt != for_stmt) 9671 gimplify_omp_ctxp->combined_loop = true; 9672 9673 for_body = NULL; 9674 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) 9675 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt))); 9676 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) 9677 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt))); 9678 9679 tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED); 9680 bool is_doacross = false; 9681 if (c && OMP_CLAUSE_ORDERED_EXPR (c)) 9682 { 9683 is_doacross = true; 9684 gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH 9685 (OMP_FOR_INIT (for_stmt)) 9686 * 2); 9687 } 9688 int collapse = 1, tile = 0; 9689 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE); 9690 if (c) 9691 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c)); 9692 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE); 9693 if (c) 9694 tile = list_length (OMP_CLAUSE_TILE_LIST (c)); 9695 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++) 9696 { 9697 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i); 9698 gcc_assert (TREE_CODE (t) == MODIFY_EXPR); 9699 decl = TREE_OPERAND (t, 0); 9700 gcc_assert (DECL_P (decl)); 9701 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl)) 9702 || POINTER_TYPE_P (TREE_TYPE (decl))); 9703 if (is_doacross) 9704 { 9705 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt)) 9706 gimplify_omp_ctxp->loop_iter_var.quick_push 9707 (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i)); 9708 else 9709 gimplify_omp_ctxp->loop_iter_var.quick_push (decl); 9710 gimplify_omp_ctxp->loop_iter_var.quick_push (decl); 9711 } 9712 9713 /* Make sure the iteration variable is private. */ 9714 tree c = NULL_TREE; 9715 tree c2 = NULL_TREE; 9716 if (orig_for_stmt != for_stmt) 9717 /* Do this only on innermost construct for combined ones. 
*/; 9718 else if (ort == ORT_SIMD) 9719 { 9720 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables, 9721 (splay_tree_key) decl); 9722 omp_is_private (gimplify_omp_ctxp, decl, 9723 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) 9724 != 1)); 9725 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0) 9726 omp_notice_variable (gimplify_omp_ctxp, decl, true); 9727 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1) 9728 { 9729 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR); 9730 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1; 9731 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN; 9732 if (has_decl_expr 9733 && bitmap_bit_p (has_decl_expr, DECL_UID (decl))) 9734 { 9735 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1; 9736 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER; 9737 } 9738 struct gimplify_omp_ctx *outer 9739 = gimplify_omp_ctxp->outer_context; 9740 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)) 9741 { 9742 if (outer->region_type == ORT_WORKSHARE 9743 && outer->combined_loop) 9744 { 9745 n = splay_tree_lookup (outer->variables, 9746 (splay_tree_key)decl); 9747 if (n != NULL && (n->value & GOVD_LOCAL) != 0) 9748 { 9749 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1; 9750 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER; 9751 } 9752 else 9753 { 9754 struct gimplify_omp_ctx *octx = outer->outer_context; 9755 if (octx 9756 && octx->region_type == ORT_COMBINED_PARALLEL 9757 && octx->outer_context 9758 && (octx->outer_context->region_type 9759 == ORT_WORKSHARE) 9760 && octx->outer_context->combined_loop) 9761 { 9762 octx = octx->outer_context; 9763 n = splay_tree_lookup (octx->variables, 9764 (splay_tree_key)decl); 9765 if (n != NULL && (n->value & GOVD_LOCAL) != 0) 9766 { 9767 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1; 9768 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER; 9769 } 9770 } 9771 } 9772 } 9773 } 9774 9775 OMP_CLAUSE_DECL (c) = decl; 9776 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt); 9777 OMP_FOR_CLAUSES (for_stmt) = c; 9778 omp_add_variable (gimplify_omp_ctxp, decl, flags); 9779 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)) 9780 { 9781 if (outer->region_type == ORT_WORKSHARE 9782 && outer->combined_loop) 9783 { 9784 if (outer->outer_context 9785 && (outer->outer_context->region_type 9786 == ORT_COMBINED_PARALLEL)) 9787 outer = outer->outer_context; 9788 else if (omp_check_private (outer, decl, false)) 9789 outer = NULL; 9790 } 9791 else if (((outer->region_type & ORT_TASK) != 0) 9792 && outer->combined_loop 9793 && !omp_check_private (gimplify_omp_ctxp, 9794 decl, false)) 9795 ; 9796 else if (outer->region_type != ORT_COMBINED_PARALLEL) 9797 { 9798 omp_notice_variable (outer, decl, true); 9799 outer = NULL; 9800 } 9801 if (outer) 9802 { 9803 n = splay_tree_lookup (outer->variables, 9804 (splay_tree_key)decl); 9805 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0) 9806 { 9807 omp_add_variable (outer, decl, 9808 GOVD_LASTPRIVATE | GOVD_SEEN); 9809 if (outer->region_type == ORT_COMBINED_PARALLEL 9810 && outer->outer_context 9811 && (outer->outer_context->region_type 9812 == ORT_WORKSHARE) 9813 && outer->outer_context->combined_loop) 9814 { 9815 outer = outer->outer_context; 9816 n = splay_tree_lookup (outer->variables, 9817 (splay_tree_key)decl); 9818 if (omp_check_private (outer, decl, false)) 9819 outer = NULL; 9820 else if (n == NULL 9821 || ((n->value & GOVD_DATA_SHARE_CLASS) 9822 == 0)) 9823 omp_add_variable (outer, decl, 9824 GOVD_LASTPRIVATE 9825 | GOVD_SEEN); 9826 else 9827 outer = NULL; 9828 } 9829 if (outer && outer->outer_context 9830 && 
(outer->outer_context->region_type 9831 == ORT_COMBINED_TEAMS)) 9832 { 9833 outer = outer->outer_context; 9834 n = splay_tree_lookup (outer->variables, 9835 (splay_tree_key)decl); 9836 if (n == NULL 9837 || (n->value & GOVD_DATA_SHARE_CLASS) == 0) 9838 omp_add_variable (outer, decl, 9839 GOVD_SHARED | GOVD_SEEN); 9840 else 9841 outer = NULL; 9842 } 9843 if (outer && outer->outer_context) 9844 omp_notice_variable (outer->outer_context, decl, 9845 true); 9846 } 9847 } 9848 } 9849 } 9850 else 9851 { 9852 bool lastprivate 9853 = (!has_decl_expr 9854 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl))); 9855 struct gimplify_omp_ctx *outer 9856 = gimplify_omp_ctxp->outer_context; 9857 if (outer && lastprivate) 9858 { 9859 if (outer->region_type == ORT_WORKSHARE 9860 && outer->combined_loop) 9861 { 9862 n = splay_tree_lookup (outer->variables, 9863 (splay_tree_key)decl); 9864 if (n != NULL && (n->value & GOVD_LOCAL) != 0) 9865 { 9866 lastprivate = false; 9867 outer = NULL; 9868 } 9869 else if (outer->outer_context 9870 && (outer->outer_context->region_type 9871 == ORT_COMBINED_PARALLEL)) 9872 outer = outer->outer_context; 9873 else if (omp_check_private (outer, decl, false)) 9874 outer = NULL; 9875 } 9876 else if (((outer->region_type & ORT_TASK) != 0) 9877 && outer->combined_loop 9878 && !omp_check_private (gimplify_omp_ctxp, 9879 decl, false)) 9880 ; 9881 else if (outer->region_type != ORT_COMBINED_PARALLEL) 9882 { 9883 omp_notice_variable (outer, decl, true); 9884 outer = NULL; 9885 } 9886 if (outer) 9887 { 9888 n = splay_tree_lookup (outer->variables, 9889 (splay_tree_key)decl); 9890 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0) 9891 { 9892 omp_add_variable (outer, decl, 9893 GOVD_LASTPRIVATE | GOVD_SEEN); 9894 if (outer->region_type == ORT_COMBINED_PARALLEL 9895 && outer->outer_context 9896 && (outer->outer_context->region_type 9897 == ORT_WORKSHARE) 9898 && outer->outer_context->combined_loop) 9899 { 9900 outer = outer->outer_context; 9901 n = splay_tree_lookup (outer->variables, 9902 (splay_tree_key)decl); 9903 if (omp_check_private (outer, decl, false)) 9904 outer = NULL; 9905 else if (n == NULL 9906 || ((n->value & GOVD_DATA_SHARE_CLASS) 9907 == 0)) 9908 omp_add_variable (outer, decl, 9909 GOVD_LASTPRIVATE 9910 | GOVD_SEEN); 9911 else 9912 outer = NULL; 9913 } 9914 if (outer && outer->outer_context 9915 && (outer->outer_context->region_type 9916 == ORT_COMBINED_TEAMS)) 9917 { 9918 outer = outer->outer_context; 9919 n = splay_tree_lookup (outer->variables, 9920 (splay_tree_key)decl); 9921 if (n == NULL 9922 || (n->value & GOVD_DATA_SHARE_CLASS) == 0) 9923 omp_add_variable (outer, decl, 9924 GOVD_SHARED | GOVD_SEEN); 9925 else 9926 outer = NULL; 9927 } 9928 if (outer && outer->outer_context) 9929 omp_notice_variable (outer->outer_context, decl, 9930 true); 9931 } 9932 } 9933 } 9934 9935 c = build_omp_clause (input_location, 9936 lastprivate ? OMP_CLAUSE_LASTPRIVATE 9937 : OMP_CLAUSE_PRIVATE); 9938 OMP_CLAUSE_DECL (c) = decl; 9939 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt); 9940 OMP_FOR_CLAUSES (for_stmt) = c; 9941 omp_add_variable (gimplify_omp_ctxp, decl, 9942 (lastprivate ? 
GOVD_LASTPRIVATE : GOVD_PRIVATE) 9943 | GOVD_EXPLICIT | GOVD_SEEN); 9944 c = NULL_TREE; 9945 } 9946 } 9947 else if (omp_is_private (gimplify_omp_ctxp, decl, 0)) 9948 omp_notice_variable (gimplify_omp_ctxp, decl, true); 9949 else 9950 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN); 9951 9952 /* If DECL is not a gimple register, create a temporary variable to act 9953 as an iteration counter. This is valid, since DECL cannot be 9954 modified in the body of the loop. Similarly for any iteration vars 9955 in simd with collapse > 1 where the iterator vars must be 9956 lastprivate. */ 9957 if (orig_for_stmt != for_stmt) 9958 var = decl; 9959 else if (!is_gimple_reg (decl) 9960 || (ort == ORT_SIMD 9961 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)) 9962 { 9963 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; 9964 /* Make sure omp_add_variable is not called on it prematurely. 9965 We call it ourselves a few lines later. */ 9966 gimplify_omp_ctxp = NULL; 9967 var = create_tmp_var (TREE_TYPE (decl), get_name (decl)); 9968 gimplify_omp_ctxp = ctx; 9969 TREE_OPERAND (t, 0) = var; 9970 9971 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var)); 9972 9973 if (ort == ORT_SIMD 9974 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1) 9975 { 9976 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR); 9977 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1; 9978 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1; 9979 OMP_CLAUSE_DECL (c2) = var; 9980 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt); 9981 OMP_FOR_CLAUSES (for_stmt) = c2; 9982 omp_add_variable (gimplify_omp_ctxp, var, 9983 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN); 9984 if (c == NULL_TREE) 9985 { 9986 c = c2; 9987 c2 = NULL_TREE; 9988 } 9989 } 9990 else 9991 omp_add_variable (gimplify_omp_ctxp, var, 9992 GOVD_PRIVATE | GOVD_SEEN); 9993 } 9994 else 9995 var = decl; 9996 9997 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL, 9998 is_gimple_val, fb_rvalue, false); 9999 ret = MIN (ret, tret); 10000 if (ret == GS_ERROR) 10001 return ret; 10002 10003 /* Handle OMP_FOR_COND. */ 10004 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i); 10005 gcc_assert (COMPARISON_CLASS_P (t)); 10006 gcc_assert (TREE_OPERAND (t, 0) == decl); 10007 10008 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL, 10009 is_gimple_val, fb_rvalue, false); 10010 ret = MIN (ret, tret); 10011 10012 /* Handle OMP_FOR_INCR. */ 10013 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i); 10014 switch (TREE_CODE (t)) 10015 { 10016 case PREINCREMENT_EXPR: 10017 case POSTINCREMENT_EXPR: 10018 { 10019 tree decl = TREE_OPERAND (t, 0); 10020 /* c_omp_for_incr_canonicalize_ptr() should have been 10021 called to massage things appropriately. */ 10022 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl))); 10023 10024 if (orig_for_stmt != for_stmt) 10025 break; 10026 t = build_int_cst (TREE_TYPE (decl), 1); 10027 if (c) 10028 OMP_CLAUSE_LINEAR_STEP (c) = t; 10029 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t); 10030 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t); 10031 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t; 10032 break; 10033 } 10034 10035 case PREDECREMENT_EXPR: 10036 case POSTDECREMENT_EXPR: 10037 /* c_omp_for_incr_canonicalize_ptr() should have been 10038 called to massage things appropriately. 
*/ 10039 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl))); 10040 if (orig_for_stmt != for_stmt) 10041 break; 10042 t = build_int_cst (TREE_TYPE (decl), -1); 10043 if (c) 10044 OMP_CLAUSE_LINEAR_STEP (c) = t; 10045 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t); 10046 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t); 10047 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t; 10048 break; 10049 10050 case MODIFY_EXPR: 10051 gcc_assert (TREE_OPERAND (t, 0) == decl); 10052 TREE_OPERAND (t, 0) = var; 10053 10054 t = TREE_OPERAND (t, 1); 10055 switch (TREE_CODE (t)) 10056 { 10057 case PLUS_EXPR: 10058 if (TREE_OPERAND (t, 1) == decl) 10059 { 10060 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0); 10061 TREE_OPERAND (t, 0) = var; 10062 break; 10063 } 10064 10065 /* Fallthru. */ 10066 case MINUS_EXPR: 10067 case POINTER_PLUS_EXPR: 10068 gcc_assert (TREE_OPERAND (t, 0) == decl); 10069 TREE_OPERAND (t, 0) = var; 10070 break; 10071 default: 10072 gcc_unreachable (); 10073 } 10074 10075 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL, 10076 is_gimple_val, fb_rvalue, false); 10077 ret = MIN (ret, tret); 10078 if (c) 10079 { 10080 tree step = TREE_OPERAND (t, 1); 10081 tree stept = TREE_TYPE (decl); 10082 if (POINTER_TYPE_P (stept)) 10083 stept = sizetype; 10084 step = fold_convert (stept, step); 10085 if (TREE_CODE (t) == MINUS_EXPR) 10086 step = fold_build1 (NEGATE_EXPR, stept, step); 10087 OMP_CLAUSE_LINEAR_STEP (c) = step; 10088 if (step != TREE_OPERAND (t, 1)) 10089 { 10090 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), 10091 &for_pre_body, NULL, 10092 is_gimple_val, fb_rvalue, false); 10093 ret = MIN (ret, tret); 10094 } 10095 } 10096 break; 10097 10098 default: 10099 gcc_unreachable (); 10100 } 10101 10102 if (c2) 10103 { 10104 gcc_assert (c); 10105 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c); 10106 } 10107 10108 if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt) 10109 { 10110 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c)) 10111 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE 10112 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL) 10113 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR 10114 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c) 10115 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL)) 10116 && OMP_CLAUSE_DECL (c) == decl) 10117 { 10118 if (is_doacross && (collapse == 1 || i >= collapse)) 10119 t = var; 10120 else 10121 { 10122 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i); 10123 gcc_assert (TREE_CODE (t) == MODIFY_EXPR); 10124 gcc_assert (TREE_OPERAND (t, 0) == var); 10125 t = TREE_OPERAND (t, 1); 10126 gcc_assert (TREE_CODE (t) == PLUS_EXPR 10127 || TREE_CODE (t) == MINUS_EXPR 10128 || TREE_CODE (t) == POINTER_PLUS_EXPR); 10129 gcc_assert (TREE_OPERAND (t, 0) == var); 10130 t = build2 (TREE_CODE (t), TREE_TYPE (decl), 10131 is_doacross ? 
var : decl, 10132 TREE_OPERAND (t, 1)); 10133 } 10134 gimple_seq *seq; 10135 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE) 10136 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c); 10137 else 10138 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c); 10139 push_gimplify_context (); 10140 gimplify_assign (decl, t, seq); 10141 gimple *bind = NULL; 10142 if (gimplify_ctxp->temps) 10143 { 10144 bind = gimple_build_bind (NULL_TREE, *seq, NULL_TREE); 10145 *seq = NULL; 10146 gimplify_seq_add_stmt (seq, bind); 10147 } 10148 pop_gimplify_context (bind); 10149 } 10150 } 10151 } 10152 10153 BITMAP_FREE (has_decl_expr); 10154 10155 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP) 10156 { 10157 push_gimplify_context (); 10158 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR) 10159 { 10160 OMP_FOR_BODY (orig_for_stmt) 10161 = build3 (BIND_EXPR, void_type_node, NULL, 10162 OMP_FOR_BODY (orig_for_stmt), NULL); 10163 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1; 10164 } 10165 } 10166 10167 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt), 10168 &for_body); 10169 10170 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP) 10171 { 10172 if (gimple_code (g) == GIMPLE_BIND) 10173 pop_gimplify_context (g); 10174 else 10175 pop_gimplify_context (NULL); 10176 } 10177 10178 if (orig_for_stmt != for_stmt) 10179 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++) 10180 { 10181 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i); 10182 decl = TREE_OPERAND (t, 0); 10183 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; 10184 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP) 10185 gimplify_omp_ctxp = ctx->outer_context; 10186 var = create_tmp_var (TREE_TYPE (decl), get_name (decl)); 10187 gimplify_omp_ctxp = ctx; 10188 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN); 10189 TREE_OPERAND (t, 0) = var; 10190 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i); 10191 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1)); 10192 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var; 10193 } 10194 10195 gimplify_adjust_omp_clauses (pre_p, for_body, 10196 &OMP_FOR_CLAUSES (orig_for_stmt), 10197 TREE_CODE (orig_for_stmt)); 10198 10199 int kind; 10200 switch (TREE_CODE (orig_for_stmt)) 10201 { 10202 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break; 10203 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break; 10204 case CILK_SIMD: kind = GF_OMP_FOR_KIND_CILKSIMD; break; 10205 case CILK_FOR: kind = GF_OMP_FOR_KIND_CILKFOR; break; 10206 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break; 10207 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break; 10208 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break; 10209 default: 10210 gcc_unreachable (); 10211 } 10212 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt), 10213 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)), 10214 for_pre_body); 10215 if (orig_for_stmt != for_stmt) 10216 gimple_omp_for_set_combined_p (gfor, true); 10217 if (gimplify_omp_ctxp 10218 && (gimplify_omp_ctxp->combined_loop 10219 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL 10220 && gimplify_omp_ctxp->outer_context 10221 && gimplify_omp_ctxp->outer_context->combined_loop))) 10222 { 10223 gimple_omp_for_set_combined_into_p (gfor, true); 10224 if (gimplify_omp_ctxp->combined_loop) 10225 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD); 10226 else 10227 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR); 10228 } 10229 10230 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++) 10231 { 10232 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i); 
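/* Transcribe the i-th init/cond/incr triple from the front end's
	 TREE_VECs into the GIMPLE_OMP_FOR tuple: index and initial value
	 from OMP_FOR_INIT, comparison code and final bound from
	 OMP_FOR_COND, and the increment expression from OMP_FOR_INCR.  */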
10233 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0)); 10234 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1)); 10235 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i); 10236 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t)); 10237 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1)); 10238 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i); 10239 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1)); 10240 } 10241 10242 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop 10243 constructs with GIMPLE_OMP_TASK sandwiched in between them. 10244 The outer taskloop stands for computing the number of iterations, 10245 counts for collapsed loops and holding taskloop specific clauses. 10246 The task construct stands for the effect of data sharing on the 10247 explicit task it creates and the inner taskloop stands for expansion 10248 of the static loop inside of the explicit task construct. */ 10249 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP) 10250 { 10251 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor); 10252 tree task_clauses = NULL_TREE; 10253 tree c = *gfor_clauses_ptr; 10254 tree *gtask_clauses_ptr = &task_clauses; 10255 tree outer_for_clauses = NULL_TREE; 10256 tree *gforo_clauses_ptr = &outer_for_clauses; 10257 for (; c; c = OMP_CLAUSE_CHAIN (c)) 10258 switch (OMP_CLAUSE_CODE (c)) 10259 { 10260 /* These clauses are allowed on task, move them there. */ 10261 case OMP_CLAUSE_SHARED: 10262 case OMP_CLAUSE_FIRSTPRIVATE: 10263 case OMP_CLAUSE_DEFAULT: 10264 case OMP_CLAUSE_IF: 10265 case OMP_CLAUSE_UNTIED: 10266 case OMP_CLAUSE_FINAL: 10267 case OMP_CLAUSE_MERGEABLE: 10268 case OMP_CLAUSE_PRIORITY: 10269 *gtask_clauses_ptr = c; 10270 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c); 10271 break; 10272 case OMP_CLAUSE_PRIVATE: 10273 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c)) 10274 { 10275 /* We want private on outer for and firstprivate 10276 on task. */ 10277 *gtask_clauses_ptr 10278 = build_omp_clause (OMP_CLAUSE_LOCATION (c), 10279 OMP_CLAUSE_FIRSTPRIVATE); 10280 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c); 10281 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL); 10282 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr); 10283 *gforo_clauses_ptr = c; 10284 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c); 10285 } 10286 else 10287 { 10288 *gtask_clauses_ptr = c; 10289 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c); 10290 } 10291 break; 10292 /* These clauses go into outer taskloop clauses. */ 10293 case OMP_CLAUSE_GRAINSIZE: 10294 case OMP_CLAUSE_NUM_TASKS: 10295 case OMP_CLAUSE_NOGROUP: 10296 *gforo_clauses_ptr = c; 10297 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c); 10298 break; 10299 /* Taskloop clause we duplicate on both taskloops. */ 10300 case OMP_CLAUSE_COLLAPSE: 10301 *gfor_clauses_ptr = c; 10302 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c); 10303 *gforo_clauses_ptr = copy_node (c); 10304 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr); 10305 break; 10306 /* For lastprivate, keep the clause on inner taskloop, and add 10307 a shared clause on task. If the same decl is also firstprivate, 10308 add also firstprivate clause on the inner taskloop. 
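As an illustrative sketch:

     #pragma omp taskloop lastprivate (x)

   is split below so that the GIMPLE_OMP_TASK gets shared (x)
   (marked firstprivate as well if the original clause was), while
   the inner GIMPLE_OMP_FOR keeps lastprivate (x).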
*/ 10309 case OMP_CLAUSE_LASTPRIVATE: 10310 if (OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c)) 10311 { 10312 /* For taskloop C++ lastprivate IVs, we want: 10313 1) private on outer taskloop 10314 2) firstprivate and shared on task 10315 3) lastprivate on inner taskloop */ 10316 *gtask_clauses_ptr 10317 = build_omp_clause (OMP_CLAUSE_LOCATION (c), 10318 OMP_CLAUSE_FIRSTPRIVATE); 10319 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c); 10320 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL); 10321 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr); 10322 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1; 10323 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c), 10324 OMP_CLAUSE_PRIVATE); 10325 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c); 10326 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1; 10327 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c); 10328 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr); 10329 } 10330 *gfor_clauses_ptr = c; 10331 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c); 10332 *gtask_clauses_ptr 10333 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED); 10334 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c); 10335 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)) 10336 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1; 10337 gtask_clauses_ptr 10338 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr); 10339 break; 10340 default: 10341 gcc_unreachable (); 10342 } 10343 *gfor_clauses_ptr = NULL_TREE; 10344 *gtask_clauses_ptr = NULL_TREE; 10345 *gforo_clauses_ptr = NULL_TREE; 10346 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE); 10347 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE, 10348 NULL_TREE, NULL_TREE, NULL_TREE); 10349 gimple_omp_task_set_taskloop_p (g, true); 10350 g = gimple_build_bind (NULL_TREE, g, NULL_TREE); 10351 gomp_for *gforo 10352 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses, 10353 gimple_omp_for_collapse (gfor), 10354 gimple_omp_for_pre_body (gfor)); 10355 gimple_omp_for_set_pre_body (gfor, NULL); 10356 gimple_omp_for_set_combined_p (gforo, true); 10357 gimple_omp_for_set_combined_into_p (gfor, true); 10358 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++) 10359 { 10360 tree type = TREE_TYPE (gimple_omp_for_index (gfor, i)); 10361 tree v = create_tmp_var (type); 10362 gimple_omp_for_set_index (gforo, i, v); 10363 t = unshare_expr (gimple_omp_for_initial (gfor, i)); 10364 gimple_omp_for_set_initial (gforo, i, t); 10365 gimple_omp_for_set_cond (gforo, i, 10366 gimple_omp_for_cond (gfor, i)); 10367 t = unshare_expr (gimple_omp_for_final (gfor, i)); 10368 gimple_omp_for_set_final (gforo, i, t); 10369 t = unshare_expr (gimple_omp_for_incr (gfor, i)); 10370 gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i)); 10371 TREE_OPERAND (t, 0) = v; 10372 gimple_omp_for_set_incr (gforo, i, t); 10373 t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE); 10374 OMP_CLAUSE_DECL (t) = v; 10375 OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo); 10376 gimple_omp_for_set_clauses (gforo, t); 10377 } 10378 gimplify_seq_add_stmt (pre_p, gforo); 10379 } 10380 else 10381 gimplify_seq_add_stmt (pre_p, gfor); 10382 if (ret != GS_ALL_DONE) 10383 return GS_ERROR; 10384 *expr_p = NULL_TREE; 10385 return GS_ALL_DONE; 10386 } 10387 10388 /* Helper function of optimize_target_teams, find OMP_TEAMS inside 10389 of OMP_TARGET's body. 
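Only BIND_EXPRs and STATEMENT_LISTs are walked through, so a teams
   construct is found only when it is directly nested in the target
   body rather than buried inside other control flow.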
*/ 10390 10391 static tree 10392 find_omp_teams (tree *tp, int *walk_subtrees, void *) 10393 { 10394 *walk_subtrees = 0; 10395 switch (TREE_CODE (*tp)) 10396 { 10397 case OMP_TEAMS: 10398 return *tp; 10399 case BIND_EXPR: 10400 case STATEMENT_LIST: 10401 *walk_subtrees = 1; 10402 break; 10403 default: 10404 break; 10405 } 10406 return NULL_TREE; 10407 } 10408 10409 /* Helper function of optimize_target_teams, determine if the expression 10410 can be computed safely before the target construct on the host. */ 10411 10412 static tree 10413 computable_teams_clause (tree *tp, int *walk_subtrees, void *) 10414 { 10415 splay_tree_node n; 10416 10417 if (TYPE_P (*tp)) 10418 { 10419 *walk_subtrees = 0; 10420 return NULL_TREE; 10421 } 10422 switch (TREE_CODE (*tp)) 10423 { 10424 case VAR_DECL: 10425 case PARM_DECL: 10426 case RESULT_DECL: 10427 *walk_subtrees = 0; 10428 if (error_operand_p (*tp) 10429 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp)) 10430 || DECL_HAS_VALUE_EXPR_P (*tp) 10431 || DECL_THREAD_LOCAL_P (*tp) 10432 || TREE_SIDE_EFFECTS (*tp) 10433 || TREE_THIS_VOLATILE (*tp)) 10434 return *tp; 10435 if (is_global_var (*tp) 10436 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp)) 10437 || lookup_attribute ("omp declare target link", 10438 DECL_ATTRIBUTES (*tp)))) 10439 return *tp; 10440 if (VAR_P (*tp) 10441 && !DECL_SEEN_IN_BIND_EXPR_P (*tp) 10442 && !is_global_var (*tp) 10443 && decl_function_context (*tp) == current_function_decl) 10444 return *tp; 10445 n = splay_tree_lookup (gimplify_omp_ctxp->variables, 10446 (splay_tree_key) *tp); 10447 if (n == NULL) 10448 { 10449 if (gimplify_omp_ctxp->target_map_scalars_firstprivate) 10450 return NULL_TREE; 10451 return *tp; 10452 } 10453 else if (n->value & GOVD_LOCAL) 10454 return *tp; 10455 else if (n->value & GOVD_FIRSTPRIVATE) 10456 return NULL_TREE; 10457 else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO)) 10458 == (GOVD_MAP | GOVD_MAP_ALWAYS_TO)) 10459 return NULL_TREE; 10460 return *tp; 10461 case INTEGER_CST: 10462 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp))) 10463 return *tp; 10464 return NULL_TREE; 10465 case TARGET_EXPR: 10466 if (TARGET_EXPR_INITIAL (*tp) 10467 || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL) 10468 return *tp; 10469 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp), 10470 walk_subtrees, NULL); 10471 /* Allow some reasonable subset of integral arithmetics. */ 10472 case PLUS_EXPR: 10473 case MINUS_EXPR: 10474 case MULT_EXPR: 10475 case TRUNC_DIV_EXPR: 10476 case CEIL_DIV_EXPR: 10477 case FLOOR_DIV_EXPR: 10478 case ROUND_DIV_EXPR: 10479 case TRUNC_MOD_EXPR: 10480 case CEIL_MOD_EXPR: 10481 case FLOOR_MOD_EXPR: 10482 case ROUND_MOD_EXPR: 10483 case RDIV_EXPR: 10484 case EXACT_DIV_EXPR: 10485 case MIN_EXPR: 10486 case MAX_EXPR: 10487 case LSHIFT_EXPR: 10488 case RSHIFT_EXPR: 10489 case BIT_IOR_EXPR: 10490 case BIT_XOR_EXPR: 10491 case BIT_AND_EXPR: 10492 case NEGATE_EXPR: 10493 case ABS_EXPR: 10494 case BIT_NOT_EXPR: 10495 case NON_LVALUE_EXPR: 10496 CASE_CONVERT: 10497 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp))) 10498 return *tp; 10499 return NULL_TREE; 10500 /* And disallow anything else, except for comparisons. */ 10501 default: 10502 if (COMPARISON_CLASS_P (*tp)) 10503 return NULL_TREE; 10504 return *tp; 10505 } 10506 } 10507 10508 /* Try to determine if the num_teams and/or thread_limit expressions 10509 can have their values determined already before entering the 10510 target construct. 
10511 INTEGER_CSTs trivially can, as can integral decls that are
10512 firstprivate (explicitly or implicitly) or explicitly mapped with
10513 map(always, to:) or map(always, tofrom:) on the target region,
10514 and expressions involving simple arithmetics on those; function
10515 calls are not OK, nor is dereferencing something, etc.
10516 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
10517 EXPR based on what we find:
10518 0 stands for the clause not being specified at all (use the
10519 implementation default); -1 stands for a value that can't be
10520 determined easily before entering the target construct.
10521 If no teams construct is present at all, use 1 for num_teams
10522 and 0 for thread_limit (only one team is involved, and the thread
10523 limit is implementation defined). */
10524
10525 static void
10526 optimize_target_teams (tree target, gimple_seq *pre_p)
10527 {
10528 tree body = OMP_BODY (target);
10529 tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
10530 tree num_teams = integer_zero_node;
10531 tree thread_limit = integer_zero_node;
10532 location_t num_teams_loc = EXPR_LOCATION (target);
10533 location_t thread_limit_loc = EXPR_LOCATION (target);
10534 tree c, *p, expr;
10535 struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;
10536
10537 if (teams == NULL_TREE)
10538 num_teams = integer_one_node;
10539 else
10540 for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
10541 {
10542 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
10543 {
10544 p = &num_teams;
10545 num_teams_loc = OMP_CLAUSE_LOCATION (c);
10546 }
10547 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
10548 {
10549 p = &thread_limit;
10550 thread_limit_loc = OMP_CLAUSE_LOCATION (c);
10551 }
10552 else
10553 continue;
10554 expr = OMP_CLAUSE_OPERAND (c, 0);
10555 if (TREE_CODE (expr) == INTEGER_CST)
10556 {
10557 *p = expr;
10558 continue;
10559 }
10560 if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
10561 {
10562 *p = integer_minus_one_node;
10563 continue;
10564 }
10565 *p = expr;
10566 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
10567 if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
10568 == GS_ERROR)
10569 {
10570 gimplify_omp_ctxp = target_ctx;
10571 *p = integer_minus_one_node;
10572 continue;
10573 }
10574 gimplify_omp_ctxp = target_ctx;
10575 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
10576 OMP_CLAUSE_OPERAND (c, 0) = *p;
10577 }
10578 c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
10579 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
10580 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
10581 OMP_TARGET_CLAUSES (target) = c;
10582 c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
10583 OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
10584 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
10585 OMP_TARGET_CLAUSES (target) = c;
10586 }
10587
10588 /* Gimplify the gross structure of several OMP constructs. */
10589
10590 static void
10591 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
10592 {
10593 tree expr = *expr_p;
10594 gimple *stmt;
10595 gimple_seq body = NULL;
10596 enum omp_region_type ort;
10597
10598 switch (TREE_CODE (expr))
10599 {
10600 case OMP_SECTIONS:
10601 case OMP_SINGLE:
10602 ort = ORT_WORKSHARE;
10603 break;
10604 case OMP_TARGET:
10605 ort = OMP_TARGET_COMBINED (expr) ?
ORT_COMBINED_TARGET : ORT_TARGET; 10606 break; 10607 case OACC_KERNELS: 10608 ort = ORT_ACC_KERNELS; 10609 break; 10610 case OACC_PARALLEL: 10611 ort = ORT_ACC_PARALLEL; 10612 break; 10613 case OACC_DATA: 10614 ort = ORT_ACC_DATA; 10615 break; 10616 case OMP_TARGET_DATA: 10617 ort = ORT_TARGET_DATA; 10618 break; 10619 case OMP_TEAMS: 10620 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS; 10621 break; 10622 case OACC_HOST_DATA: 10623 ort = ORT_ACC_HOST_DATA; 10624 break; 10625 default: 10626 gcc_unreachable (); 10627 } 10628 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort, 10629 TREE_CODE (expr)); 10630 if (TREE_CODE (expr) == OMP_TARGET) 10631 optimize_target_teams (expr, pre_p); 10632 if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0) 10633 { 10634 push_gimplify_context (); 10635 gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body); 10636 if (gimple_code (g) == GIMPLE_BIND) 10637 pop_gimplify_context (g); 10638 else 10639 pop_gimplify_context (NULL); 10640 if ((ort & ORT_TARGET_DATA) != 0) 10641 { 10642 enum built_in_function end_ix; 10643 switch (TREE_CODE (expr)) 10644 { 10645 case OACC_DATA: 10646 case OACC_HOST_DATA: 10647 end_ix = BUILT_IN_GOACC_DATA_END; 10648 break; 10649 case OMP_TARGET_DATA: 10650 end_ix = BUILT_IN_GOMP_TARGET_END_DATA; 10651 break; 10652 default: 10653 gcc_unreachable (); 10654 } 10655 tree fn = builtin_decl_explicit (end_ix); 10656 g = gimple_build_call (fn, 0); 10657 gimple_seq cleanup = NULL; 10658 gimple_seq_add_stmt (&cleanup, g); 10659 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY); 10660 body = NULL; 10661 gimple_seq_add_stmt (&body, g); 10662 } 10663 } 10664 else 10665 gimplify_and_add (OMP_BODY (expr), &body); 10666 gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr), 10667 TREE_CODE (expr)); 10668 10669 switch (TREE_CODE (expr)) 10670 { 10671 case OACC_DATA: 10672 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA, 10673 OMP_CLAUSES (expr)); 10674 break; 10675 case OACC_KERNELS: 10676 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS, 10677 OMP_CLAUSES (expr)); 10678 break; 10679 case OACC_HOST_DATA: 10680 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA, 10681 OMP_CLAUSES (expr)); 10682 break; 10683 case OACC_PARALLEL: 10684 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL, 10685 OMP_CLAUSES (expr)); 10686 break; 10687 case OMP_SECTIONS: 10688 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr)); 10689 break; 10690 case OMP_SINGLE: 10691 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr)); 10692 break; 10693 case OMP_TARGET: 10694 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION, 10695 OMP_CLAUSES (expr)); 10696 break; 10697 case OMP_TARGET_DATA: 10698 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA, 10699 OMP_CLAUSES (expr)); 10700 break; 10701 case OMP_TEAMS: 10702 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr)); 10703 break; 10704 default: 10705 gcc_unreachable (); 10706 } 10707 10708 gimplify_seq_add_stmt (pre_p, stmt); 10709 *expr_p = NULL_TREE; 10710 } 10711 10712 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP 10713 target update constructs. 
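These are all standalone directives; e.g. (a sketch only)

     #pragma omp target update to (x)

   becomes a bodyless GIMPLE_OMP_TARGET with kind
   GF_OMP_TARGET_KIND_UPDATE that carries the scanned and adjusted
   clauses.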
*/ 10714 10715 static void 10716 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p) 10717 { 10718 tree expr = *expr_p; 10719 int kind; 10720 gomp_target *stmt; 10721 enum omp_region_type ort = ORT_WORKSHARE; 10722 10723 switch (TREE_CODE (expr)) 10724 { 10725 case OACC_ENTER_DATA: 10726 case OACC_EXIT_DATA: 10727 kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA; 10728 ort = ORT_ACC; 10729 break; 10730 case OACC_UPDATE: 10731 kind = GF_OMP_TARGET_KIND_OACC_UPDATE; 10732 ort = ORT_ACC; 10733 break; 10734 case OMP_TARGET_UPDATE: 10735 kind = GF_OMP_TARGET_KIND_UPDATE; 10736 break; 10737 case OMP_TARGET_ENTER_DATA: 10738 kind = GF_OMP_TARGET_KIND_ENTER_DATA; 10739 break; 10740 case OMP_TARGET_EXIT_DATA: 10741 kind = GF_OMP_TARGET_KIND_EXIT_DATA; 10742 break; 10743 default: 10744 gcc_unreachable (); 10745 } 10746 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p, 10747 ort, TREE_CODE (expr)); 10748 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr), 10749 TREE_CODE (expr)); 10750 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr)); 10751 10752 gimplify_seq_add_stmt (pre_p, stmt); 10753 *expr_p = NULL_TREE; 10754 } 10755 10756 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have 10757 stabilized the lhs of the atomic operation as *ADDR. Return true if 10758 EXPR is this stabilized form. */ 10759 10760 static bool 10761 goa_lhs_expr_p (tree expr, tree addr) 10762 { 10763 /* Also include casts to other type variants. The C front end is fond 10764 of adding these for e.g. volatile variables. This is like 10765 STRIP_TYPE_NOPS but includes the main variant lookup. */ 10766 STRIP_USELESS_TYPE_CONVERSION (expr); 10767 10768 if (TREE_CODE (expr) == INDIRECT_REF) 10769 { 10770 expr = TREE_OPERAND (expr, 0); 10771 while (expr != addr 10772 && (CONVERT_EXPR_P (expr) 10773 || TREE_CODE (expr) == NON_LVALUE_EXPR) 10774 && TREE_CODE (expr) == TREE_CODE (addr) 10775 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr))) 10776 { 10777 expr = TREE_OPERAND (expr, 0); 10778 addr = TREE_OPERAND (addr, 0); 10779 } 10780 if (expr == addr) 10781 return true; 10782 return (TREE_CODE (addr) == ADDR_EXPR 10783 && TREE_CODE (expr) == ADDR_EXPR 10784 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0)); 10785 } 10786 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0)) 10787 return true; 10788 return false; 10789 } 10790 10791 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an 10792 expression does not involve the lhs, evaluate it into a temporary. 10793 Return 1 if the lhs appeared as a subexpression, 0 if it did not, 10794 or -1 if an error was encountered. 
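For instance (an illustrative sketch, names made up): gimplifying

     #pragma omp atomic
     x = x + foo ();

   evaluates 'foo ()' into a temporary emitted in PRE_P, while the
   appearances of 'x' are replaced by LHS_VAR, the temporary that
   receives the atomic load.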
*/ 10795 10796 static int 10797 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr, 10798 tree lhs_var) 10799 { 10800 tree expr = *expr_p; 10801 int saw_lhs; 10802 10803 if (goa_lhs_expr_p (expr, lhs_addr)) 10804 { 10805 *expr_p = lhs_var; 10806 return 1; 10807 } 10808 if (is_gimple_val (expr)) 10809 return 0; 10810 10811 saw_lhs = 0; 10812 switch (TREE_CODE_CLASS (TREE_CODE (expr))) 10813 { 10814 case tcc_binary: 10815 case tcc_comparison: 10816 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr, 10817 lhs_var); 10818 /* FALLTHRU */ 10819 case tcc_unary: 10820 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr, 10821 lhs_var); 10822 break; 10823 case tcc_expression: 10824 switch (TREE_CODE (expr)) 10825 { 10826 case TRUTH_ANDIF_EXPR: 10827 case TRUTH_ORIF_EXPR: 10828 case TRUTH_AND_EXPR: 10829 case TRUTH_OR_EXPR: 10830 case TRUTH_XOR_EXPR: 10831 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, 10832 lhs_addr, lhs_var); 10833 /* FALLTHRU */ 10834 case TRUTH_NOT_EXPR: 10835 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, 10836 lhs_addr, lhs_var); 10837 break; 10838 case COMPOUND_EXPR: 10839 /* Break out any preevaluations from cp_build_modify_expr. */ 10840 for (; TREE_CODE (expr) == COMPOUND_EXPR; 10841 expr = TREE_OPERAND (expr, 1)) 10842 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p); 10843 *expr_p = expr; 10844 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var); 10845 default: 10846 break; 10847 } 10848 break; 10849 default: 10850 break; 10851 } 10852 10853 if (saw_lhs == 0) 10854 { 10855 enum gimplify_status gs; 10856 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue); 10857 if (gs != GS_ALL_DONE) 10858 saw_lhs = -1; 10859 } 10860 10861 return saw_lhs; 10862 } 10863 10864 /* Gimplify an OMP_ATOMIC statement. */ 10865 10866 static enum gimplify_status 10867 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p) 10868 { 10869 tree addr = TREE_OPERAND (*expr_p, 0); 10870 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ 10871 ? 
NULL : TREE_OPERAND (*expr_p, 1); 10872 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr))); 10873 tree tmp_load; 10874 gomp_atomic_load *loadstmt; 10875 gomp_atomic_store *storestmt; 10876 10877 tmp_load = create_tmp_reg (type); 10878 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0) 10879 return GS_ERROR; 10880 10881 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue) 10882 != GS_ALL_DONE) 10883 return GS_ERROR; 10884 10885 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr); 10886 gimplify_seq_add_stmt (pre_p, loadstmt); 10887 if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue) 10888 != GS_ALL_DONE) 10889 return GS_ERROR; 10890 10891 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ) 10892 rhs = tmp_load; 10893 storestmt = gimple_build_omp_atomic_store (rhs); 10894 gimplify_seq_add_stmt (pre_p, storestmt); 10895 if (OMP_ATOMIC_SEQ_CST (*expr_p)) 10896 { 10897 gimple_omp_atomic_set_seq_cst (loadstmt); 10898 gimple_omp_atomic_set_seq_cst (storestmt); 10899 } 10900 switch (TREE_CODE (*expr_p)) 10901 { 10902 case OMP_ATOMIC_READ: 10903 case OMP_ATOMIC_CAPTURE_OLD: 10904 *expr_p = tmp_load; 10905 gimple_omp_atomic_set_need_value (loadstmt); 10906 break; 10907 case OMP_ATOMIC_CAPTURE_NEW: 10908 *expr_p = rhs; 10909 gimple_omp_atomic_set_need_value (storestmt); 10910 break; 10911 default: 10912 *expr_p = NULL; 10913 break; 10914 } 10915 10916 return GS_ALL_DONE; 10917 } 10918 10919 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the 10920 body, and adding some EH bits. */ 10921 10922 static enum gimplify_status 10923 gimplify_transaction (tree *expr_p, gimple_seq *pre_p) 10924 { 10925 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr); 10926 gimple *body_stmt; 10927 gtransaction *trans_stmt; 10928 gimple_seq body = NULL; 10929 int subcode = 0; 10930 10931 /* Wrap the transaction body in a BIND_EXPR so we have a context 10932 where to put decls for OMP. */ 10933 if (TREE_CODE (tbody) != BIND_EXPR) 10934 { 10935 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL); 10936 TREE_SIDE_EFFECTS (bind) = 1; 10937 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody)); 10938 TRANSACTION_EXPR_BODY (expr) = bind; 10939 } 10940 10941 push_gimplify_context (); 10942 temp = voidify_wrapper_expr (*expr_p, NULL); 10943 10944 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body); 10945 pop_gimplify_context (body_stmt); 10946 10947 trans_stmt = gimple_build_transaction (body); 10948 if (TRANSACTION_EXPR_OUTER (expr)) 10949 subcode = GTMA_IS_OUTER; 10950 else if (TRANSACTION_EXPR_RELAXED (expr)) 10951 subcode = GTMA_IS_RELAXED; 10952 gimple_transaction_set_subcode (trans_stmt, subcode); 10953 10954 gimplify_seq_add_stmt (pre_p, trans_stmt); 10955 10956 if (temp) 10957 { 10958 *expr_p = temp; 10959 return GS_OK; 10960 } 10961 10962 *expr_p = NULL_TREE; 10963 return GS_ALL_DONE; 10964 } 10965 10966 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY 10967 is the OMP_BODY of the original EXPR (which has already been 10968 gimplified so it's not present in the EXPR). 10969 10970 Return the gimplified GIMPLE_OMP_ORDERED tuple. 
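For doacross loops the depend (sink:) vectors are also validated
   and rewritten here; e.g. (illustrative only)

     #pragma omp for ordered (2)
     ...
     #pragma omp ordered depend (sink: i - 1, j)

   must name exactly the iteration variables of the two outermost
   associated loops, in that order, or an error is emitted.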
*/ 10971 10972 static gimple * 10973 gimplify_omp_ordered (tree expr, gimple_seq body) 10974 { 10975 tree c, decls; 10976 int failures = 0; 10977 unsigned int i; 10978 tree source_c = NULL_TREE; 10979 tree sink_c = NULL_TREE; 10980 10981 if (gimplify_omp_ctxp) 10982 { 10983 for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c)) 10984 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND 10985 && gimplify_omp_ctxp->loop_iter_var.is_empty () 10986 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK 10987 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)) 10988 { 10989 error_at (OMP_CLAUSE_LOCATION (c), 10990 "%<ordered%> construct with %<depend%> clause must be " 10991 "closely nested inside a loop with %<ordered%> clause " 10992 "with a parameter"); 10993 failures++; 10994 } 10995 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND 10996 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK) 10997 { 10998 bool fail = false; 10999 for (decls = OMP_CLAUSE_DECL (c), i = 0; 11000 decls && TREE_CODE (decls) == TREE_LIST; 11001 decls = TREE_CHAIN (decls), ++i) 11002 if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2) 11003 continue; 11004 else if (TREE_VALUE (decls) 11005 != gimplify_omp_ctxp->loop_iter_var[2 * i]) 11006 { 11007 error_at (OMP_CLAUSE_LOCATION (c), 11008 "variable %qE is not an iteration " 11009 "of outermost loop %d, expected %qE", 11010 TREE_VALUE (decls), i + 1, 11011 gimplify_omp_ctxp->loop_iter_var[2 * i]); 11012 fail = true; 11013 failures++; 11014 } 11015 else 11016 TREE_VALUE (decls) 11017 = gimplify_omp_ctxp->loop_iter_var[2 * i + 1]; 11018 if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2) 11019 { 11020 error_at (OMP_CLAUSE_LOCATION (c), 11021 "number of variables in %<depend(sink)%> " 11022 "clause does not match number of " 11023 "iteration variables"); 11024 failures++; 11025 } 11026 sink_c = c; 11027 } 11028 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND 11029 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE) 11030 { 11031 if (source_c) 11032 { 11033 error_at (OMP_CLAUSE_LOCATION (c), 11034 "more than one %<depend(source)%> clause on an " 11035 "%<ordered%> construct"); 11036 failures++; 11037 } 11038 else 11039 source_c = c; 11040 } 11041 } 11042 if (source_c && sink_c) 11043 { 11044 error_at (OMP_CLAUSE_LOCATION (source_c), 11045 "%<depend(source)%> clause specified together with " 11046 "%<depend(sink:)%> clauses on the same construct"); 11047 failures++; 11048 } 11049 11050 if (failures) 11051 return gimple_build_nop (); 11052 return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr)); 11053 } 11054 11055 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the 11056 expression produces a value to be used as an operand inside a GIMPLE 11057 statement, the value will be stored back in *EXPR_P. This value will 11058 be a tree of class tcc_declaration, tcc_constant, tcc_reference or 11059 an SSA_NAME. The corresponding sequence of GIMPLE statements is 11060 emitted in PRE_P and POST_P. 11061 11062 Additionally, this process may overwrite parts of the input 11063 expression during gimplification. Ideally, it should be 11064 possible to do non-destructive gimplification. 11065 11066 EXPR_P points to the GENERIC expression to convert to GIMPLE. If 11067 the expression needs to evaluate to a value to be used as 11068 an operand in a GIMPLE statement, this value will be stored in 11069 *EXPR_P on exit. This happens when the caller specifies one 11070 of fb_lvalue or fb_rvalue fallback flags. 
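As an illustrative sketch (not a transcript of any particular caller),
gimplifying 'b * c + a' with GIMPLE_TEST_F == is_gimple_val and
FALLBACK == fb_rvalue emits something like

t.1 = b * c;
t.2 = t.1 + a;

into PRE_P and stores the temporary t.2 back in *EXPR_P (t.1 and t.2 stand
for compiler-generated temporaries).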
11071 11072 PRE_P will contain the sequence of GIMPLE statements corresponding 11073 to the evaluation of EXPR and all the side-effects that must 11074 be executed before the main expression. On exit, the last 11075 statement of PRE_P is the core statement being gimplified. For 11076 instance, when gimplifying 'if (++a)' the last statement in 11077 PRE_P will be 'if (t.1)' where t.1 is the result of 11078 pre-incrementing 'a'. 11079 11080 POST_P will contain the sequence of GIMPLE statements corresponding 11081 to the evaluation of all the side-effects that must be executed 11082 after the main expression. If this is NULL, the post 11083 side-effects are stored at the end of PRE_P. 11084 11085 The reason why the output is split in two is to handle post 11086 side-effects explicitly. In some cases, an expression may have 11087 inner and outer post side-effects which need to be emitted in 11088 an order different from the one given by the recursive 11089 traversal. For instance, for the expression (*p--)++ the post 11090 side-effects of '--' must actually occur *after* the post 11091 side-effects of '++'. However, gimplification will first visit 11092 the inner expression, so if a separate POST sequence was not 11093 used, the resulting sequence would be: 11094 11095 1 t.1 = *p 11096 2 p = p - 1 11097 3 t.2 = t.1 + 1 11098 4 *p = t.2 11099 11100 However, the post-decrement operation in line #2 must not be 11101 evaluated until after the store to *p at line #4, so the 11102 correct sequence should be: 11103 11104 1 t.1 = *p 11105 2 t.2 = t.1 + 1 11106 3 *p = t.2 11107 4 p = p - 1 11108 11109 So, by specifying a separate post queue, it is possible 11110 to emit the post side-effects in the correct order. 11111 If POST_P is NULL, an internal queue will be used. Before 11112 returning to the caller, the sequence POST_P is appended to 11113 the main output sequence PRE_P. 11114 11115 GIMPLE_TEST_F points to a function that takes a tree T and 11116 returns nonzero if T is in the GIMPLE form requested by the 11117 caller. The GIMPLE predicates are in gimple.c. 11118 11119 FALLBACK tells the function what sort of a temporary we want if 11120 gimplification cannot produce an expression that complies with 11121 GIMPLE_TEST_F. 11122 11123 fb_none means that no temporary should be generated 11124 fb_rvalue means that an rvalue is OK to generate 11125 fb_lvalue means that an lvalue is OK to generate 11126 fb_either means that either is OK, but an lvalue is preferable. 11127 fb_mayfail means that gimplification may fail (in which case 11128 GS_ERROR will be returned) 11129 11130 The return value is either GS_ERROR or GS_ALL_DONE, since this 11131 function iterates until EXPR is completely gimplified or an error 11132 occurs. */ 11133 11134 enum gimplify_status 11135 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p, 11136 bool (*gimple_test_f) (tree), fallback_t fallback) 11137 { 11138 tree tmp; 11139 gimple_seq internal_pre = NULL; 11140 gimple_seq internal_post = NULL; 11141 tree save_expr; 11142 bool is_statement; 11143 location_t saved_location; 11144 enum gimplify_status ret; 11145 gimple_stmt_iterator pre_last_gsi, post_last_gsi; 11146 tree label; 11147 11148 save_expr = *expr_p; 11149 if (save_expr == NULL_TREE) 11150 return GS_ALL_DONE; 11151 11152 /* If we are gimplifying a top-level statement, PRE_P must be valid. */ 11153 is_statement = gimple_test_f == is_gimple_stmt; 11154 if (is_statement) 11155 gcc_assert (pre_p); 11156 11157 /* Consistency checks. 
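Each predicate implies which fallbacks are sensible: e.g. a caller asking
for an is_gimple_val operand must be able to accept an rvalue temporary,
so it has to pass fb_rvalue (possibly among other bits).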
*/ 11158 if (gimple_test_f == is_gimple_reg) 11159 gcc_assert (fallback & (fb_rvalue | fb_lvalue)); 11160 else if (gimple_test_f == is_gimple_val 11161 || gimple_test_f == is_gimple_call_addr 11162 || gimple_test_f == is_gimple_condexpr 11163 || gimple_test_f == is_gimple_mem_rhs 11164 || gimple_test_f == is_gimple_mem_rhs_or_call 11165 || gimple_test_f == is_gimple_reg_rhs 11166 || gimple_test_f == is_gimple_reg_rhs_or_call 11167 || gimple_test_f == is_gimple_asm_val 11168 || gimple_test_f == is_gimple_mem_ref_addr) 11169 gcc_assert (fallback & fb_rvalue); 11170 else if (gimple_test_f == is_gimple_min_lval 11171 || gimple_test_f == is_gimple_lvalue) 11172 gcc_assert (fallback & fb_lvalue); 11173 else if (gimple_test_f == is_gimple_addressable) 11174 gcc_assert (fallback & fb_either); 11175 else if (gimple_test_f == is_gimple_stmt) 11176 gcc_assert (fallback == fb_none); 11177 else 11178 { 11179 /* We should have recognized the GIMPLE_TEST_F predicate to 11180 know what kind of fallback to use in case a temporary is 11181 needed to hold the value or address of *EXPR_P. */ 11182 gcc_unreachable (); 11183 } 11184 11185 /* We used to check the predicate here and return immediately if it 11186 succeeds. This is wrong; the design is for gimplification to be 11187 idempotent, and for the predicates to only test for valid forms, not 11188 whether they are fully simplified. */ 11189 if (pre_p == NULL) 11190 pre_p = &internal_pre; 11191 11192 if (post_p == NULL) 11193 post_p = &internal_post; 11194 11195 /* Remember the last statements added to PRE_P and POST_P. Every 11196 new statement added by the gimplification helpers needs to be 11197 annotated with location information. To centralize the 11198 responsibility, we remember the last statement that had been 11199 added to both queues before gimplifying *EXPR_P. If 11200 gimplification produces new statements in PRE_P and POST_P, those 11201 statements will be annotated with the same location information 11202 as *EXPR_P. */ 11203 pre_last_gsi = gsi_last (*pre_p); 11204 post_last_gsi = gsi_last (*post_p); 11205 11206 saved_location = input_location; 11207 if (save_expr != error_mark_node 11208 && EXPR_HAS_LOCATION (*expr_p)) 11209 input_location = EXPR_LOCATION (*expr_p); 11210 11211 /* Loop over the specific gimplifiers until the toplevel node 11212 remains the same. */ 11213 do 11214 { 11215 /* Strip away as many useless type conversions as possible 11216 at the toplevel. */ 11217 STRIP_USELESS_TYPE_CONVERSION (*expr_p); 11218 11219 /* Remember the expr. */ 11220 save_expr = *expr_p; 11221 11222 /* Die, die, die, my darling. */ 11223 if (save_expr == error_mark_node 11224 || (TREE_TYPE (save_expr) 11225 && TREE_TYPE (save_expr) == error_mark_node)) 11226 { 11227 ret = GS_ERROR; 11228 break; 11229 } 11230 11231 /* Do any language-specific gimplification. */ 11232 ret = ((enum gimplify_status) 11233 lang_hooks.gimplify_expr (expr_p, pre_p, post_p)); 11234 if (ret == GS_OK) 11235 { 11236 if (*expr_p == NULL_TREE) 11237 break; 11238 if (*expr_p != save_expr) 11239 continue; 11240 } 11241 else if (ret != GS_UNHANDLED) 11242 break; 11243 11244 /* Make sure that all the cases set 'ret' appropriately. */ 11245 ret = GS_UNHANDLED; 11246 switch (TREE_CODE (*expr_p)) 11247 { 11248 /* First deal with the special cases. 
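For instance (roughly), a post-increment used for its value, as in
'b = a++;', is lowered by gimplify_self_mod_expr below to

b = a;
a = a + 1;

with the increment placed on the post queue described above.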
*/ 11249 11250 case POSTINCREMENT_EXPR: 11251 case POSTDECREMENT_EXPR: 11252 case PREINCREMENT_EXPR: 11253 case PREDECREMENT_EXPR: 11254 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p, 11255 fallback != fb_none, 11256 TREE_TYPE (*expr_p)); 11257 break; 11258 11259 case VIEW_CONVERT_EXPR: 11260 if (is_gimple_reg_type (TREE_TYPE (*expr_p)) 11261 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0)))) 11262 { 11263 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, 11264 post_p, is_gimple_val, fb_rvalue); 11265 recalculate_side_effects (*expr_p); 11266 break; 11267 } 11268 /* Fallthru. */ 11269 11270 case ARRAY_REF: 11271 case ARRAY_RANGE_REF: 11272 case REALPART_EXPR: 11273 case IMAGPART_EXPR: 11274 case COMPONENT_REF: 11275 ret = gimplify_compound_lval (expr_p, pre_p, post_p, 11276 fallback ? fallback : fb_rvalue); 11277 break; 11278 11279 case COND_EXPR: 11280 ret = gimplify_cond_expr (expr_p, pre_p, fallback); 11281 11282 /* C99 code may assign to an array in a structure value of a 11283 conditional expression, and this has undefined behavior 11284 only on execution, so create a temporary if an lvalue is 11285 required. */ 11286 if (fallback == fb_lvalue) 11287 { 11288 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false); 11289 mark_addressable (*expr_p); 11290 ret = GS_OK; 11291 } 11292 break; 11293 11294 case CALL_EXPR: 11295 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none); 11296 11297 /* C99 code may assign to an array in a structure returned 11298 from a function, and this has undefined behavior only on 11299 execution, so create a temporary if an lvalue is 11300 required. */ 11301 if (fallback == fb_lvalue) 11302 { 11303 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false); 11304 mark_addressable (*expr_p); 11305 ret = GS_OK; 11306 } 11307 break; 11308 11309 case TREE_LIST: 11310 gcc_unreachable (); 11311 11312 case COMPOUND_EXPR: 11313 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none); 11314 break; 11315 11316 case COMPOUND_LITERAL_EXPR: 11317 ret = gimplify_compound_literal_expr (expr_p, pre_p, 11318 gimple_test_f, fallback); 11319 break; 11320 11321 case MODIFY_EXPR: 11322 case INIT_EXPR: 11323 ret = gimplify_modify_expr (expr_p, pre_p, post_p, 11324 fallback != fb_none); 11325 break; 11326 11327 case TRUTH_ANDIF_EXPR: 11328 case TRUTH_ORIF_EXPR: 11329 { 11330 /* Preserve the original type of the expression and the 11331 source location of the outer expression. */ 11332 tree org_type = TREE_TYPE (*expr_p); 11333 *expr_p = gimple_boolify (*expr_p); 11334 *expr_p = build3_loc (input_location, COND_EXPR, 11335 org_type, *expr_p, 11336 fold_convert_loc 11337 (input_location, 11338 org_type, boolean_true_node), 11339 fold_convert_loc 11340 (input_location, 11341 org_type, boolean_false_node)); 11342 ret = GS_OK; 11343 break; 11344 } 11345 11346 case TRUTH_NOT_EXPR: 11347 { 11348 tree type = TREE_TYPE (*expr_p); 11349 /* The parsers are careful to generate TRUTH_NOT_EXPR 11350 only with operands that are always zero or one. 11351 We do not fold here but handle the only interesting case 11352 manually, as fold may re-introduce the TRUTH_NOT_EXPR. 
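E.g. '!x' with a boolified operand of 1-bit precision becomes ~x, while
for a wider type it becomes x ^ 1, so the result stays within {0, 1}.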
*/
11353 *expr_p = gimple_boolify (*expr_p);
11354 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
11355 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
11356 TREE_TYPE (*expr_p),
11357 TREE_OPERAND (*expr_p, 0));
11358 else
11359 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
11360 TREE_TYPE (*expr_p),
11361 TREE_OPERAND (*expr_p, 0),
11362 build_int_cst (TREE_TYPE (*expr_p), 1));
11363 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
11364 *expr_p = fold_convert_loc (input_location, type, *expr_p);
11365 ret = GS_OK;
11366 break;
11367 }
11368
11369 case ADDR_EXPR:
11370 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
11371 break;
11372
11373 case ANNOTATE_EXPR:
11374 {
11375 tree cond = TREE_OPERAND (*expr_p, 0);
11376 tree kind = TREE_OPERAND (*expr_p, 1);
11377 tree type = TREE_TYPE (cond);
11378 if (!INTEGRAL_TYPE_P (type))
11379 {
11380 *expr_p = cond;
11381 ret = GS_OK;
11382 break;
11383 }
11384 tree tmp = create_tmp_var (type);
11385 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
11386 gcall *call
11387 = gimple_build_call_internal (IFN_ANNOTATE, 2, cond, kind);
11388 gimple_call_set_lhs (call, tmp);
11389 gimplify_seq_add_stmt (pre_p, call);
11390 *expr_p = tmp;
11391 ret = GS_ALL_DONE;
11392 break;
11393 }
11394
11395 case VA_ARG_EXPR:
11396 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
11397 break;
11398
11399 CASE_CONVERT:
11400 if (IS_EMPTY_STMT (*expr_p))
11401 {
11402 ret = GS_ALL_DONE;
11403 break;
11404 }
11405
11406 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
11407 || fallback == fb_none)
11408 {
11409 /* Just strip a conversion to void (or in void context) and
11410 try again. */
11411 *expr_p = TREE_OPERAND (*expr_p, 0);
11412 ret = GS_OK;
11413 break;
11414 }
11415
11416 ret = gimplify_conversion (expr_p);
11417 if (ret == GS_ERROR)
11418 break;
11419 if (*expr_p != save_expr)
11420 break;
11421 /* FALLTHRU */
11422
11423 case FIX_TRUNC_EXPR:
11424 /* unary_expr: ... | '(' cast ')' val | ... */
11425 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11426 is_gimple_val, fb_rvalue);
11427 recalculate_side_effects (*expr_p);
11428 break;
11429
11430 case INDIRECT_REF:
11431 {
11432 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
11433 bool notrap = TREE_THIS_NOTRAP (*expr_p);
11434 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
11435
11436 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
11437 if (*expr_p != save_expr)
11438 {
11439 ret = GS_OK;
11440 break;
11441 }
11442
11443 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11444 is_gimple_reg, fb_rvalue);
11445 if (ret == GS_ERROR)
11446 break;
11447
11448 recalculate_side_effects (*expr_p);
11449 *expr_p = fold_build2_loc (input_location, MEM_REF,
11450 TREE_TYPE (*expr_p),
11451 TREE_OPERAND (*expr_p, 0),
11452 build_int_cst (saved_ptr_type, 0));
11453 TREE_THIS_VOLATILE (*expr_p) = volatilep;
11454 TREE_THIS_NOTRAP (*expr_p) = notrap;
11455 ret = GS_OK;
11456 break;
11457 }
11458
11459 /* We arrive here through the various re-gimplification paths. */
11460 case MEM_REF:
11461 /* First try re-folding the whole thing. */
11462 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
11463 TREE_OPERAND (*expr_p, 0),
11464 TREE_OPERAND (*expr_p, 1));
11465 if (tmp)
11466 {
11467 REF_REVERSE_STORAGE_ORDER (tmp)
11468 = REF_REVERSE_STORAGE_ORDER (*expr_p);
11469 *expr_p = tmp;
11470 recalculate_side_effects (*expr_p);
11471 ret = GS_OK;
11472 break;
11473 }
11474 /* Avoid re-gimplifying the address operand if it is already
11475 in suitable form.
Re-gimplifying would mark the address
11476 operand addressable. Always gimplify when not in SSA form
11477 as we still may have to gimplify decls with value-exprs. */
11478 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
11479 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
11480 {
11481 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11482 is_gimple_mem_ref_addr, fb_rvalue);
11483 if (ret == GS_ERROR)
11484 break;
11485 }
11486 recalculate_side_effects (*expr_p);
11487 ret = GS_ALL_DONE;
11488 break;
11489
11490 /* Constants need not be gimplified. */
11491 case INTEGER_CST:
11492 case REAL_CST:
11493 case FIXED_CST:
11494 case STRING_CST:
11495 case COMPLEX_CST:
11496 case VECTOR_CST:
11497 /* Drop the overflow flag on constants; we do not want
11498 that in the GIMPLE IL. */
11499 if (TREE_OVERFLOW_P (*expr_p))
11500 *expr_p = drop_tree_overflow (*expr_p);
11501 ret = GS_ALL_DONE;
11502 break;
11503
11504 case CONST_DECL:
11505 /* If we require an lvalue, such as for ADDR_EXPR, retain the
11506 CONST_DECL node. Otherwise the decl is replaceable by its
11507 value. */
11508 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
11509 if (fallback & fb_lvalue)
11510 ret = GS_ALL_DONE;
11511 else
11512 {
11513 *expr_p = DECL_INITIAL (*expr_p);
11514 ret = GS_OK;
11515 }
11516 break;
11517
11518 case DECL_EXPR:
11519 ret = gimplify_decl_expr (expr_p, pre_p);
11520 break;
11521
11522 case BIND_EXPR:
11523 ret = gimplify_bind_expr (expr_p, pre_p);
11524 break;
11525
11526 case LOOP_EXPR:
11527 ret = gimplify_loop_expr (expr_p, pre_p);
11528 break;
11529
11530 case SWITCH_EXPR:
11531 ret = gimplify_switch_expr (expr_p, pre_p);
11532 break;
11533
11534 case EXIT_EXPR:
11535 ret = gimplify_exit_expr (expr_p);
11536 break;
11537
11538 case GOTO_EXPR:
11539 /* If the target is not a LABEL_DECL, then it is a computed jump
11540 and the target needs to be gimplified. */
11541 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
11542 {
11543 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
11544 NULL, is_gimple_val, fb_rvalue);
11545 if (ret == GS_ERROR)
11546 break;
11547 }
11548 gimplify_seq_add_stmt (pre_p,
11549 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
11550 ret = GS_ALL_DONE;
11551 break;
11552
11553 case PREDICT_EXPR:
11554 gimplify_seq_add_stmt (pre_p,
11555 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
11556 PREDICT_EXPR_OUTCOME (*expr_p)));
11557 ret = GS_ALL_DONE;
11558 break;
11559
11560 case LABEL_EXPR:
11561 ret = gimplify_label_expr (expr_p, pre_p);
11562 label = LABEL_EXPR_LABEL (*expr_p);
11563 gcc_assert (decl_function_context (label) == current_function_decl);
11564
11565 /* If the label is used in a goto statement, or the address of the
11566 label is taken, we need to unpoison all variables that were seen
11567 so far. Doing so prevents us from reporting false positives. */
11568 if (asan_poisoned_variables
11569 && asan_used_labels != NULL
11570 && asan_used_labels->contains (label))
11571 asan_poison_variables (asan_poisoned_variables, false, pre_p);
11572 break;
11573
11574 case CASE_LABEL_EXPR:
11575 ret = gimplify_case_label_expr (expr_p, pre_p);
11576
11577 if (gimplify_ctxp->live_switch_vars)
11578 asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
11579 pre_p);
11580 break;
11581
11582 case RETURN_EXPR:
11583 ret = gimplify_return_expr (*expr_p, pre_p);
11584 break;
11585
11586 case CONSTRUCTOR:
11587 /* Don't reduce this in place; let gimplify_init_constructor work its
11588 magic.
But if we're just elaborating this for side effects, just
11589 gimplify any element that has side-effects. */
11590 if (fallback == fb_none)
11591 {
11592 unsigned HOST_WIDE_INT ix;
11593 tree val;
11594 tree temp = NULL_TREE;
11595 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
11596 if (TREE_SIDE_EFFECTS (val))
11597 append_to_statement_list (val, &temp);
11598
11599 *expr_p = temp;
11600 ret = temp ? GS_OK : GS_ALL_DONE;
11601 }
11602 /* C99 code may assign to an array in a constructed
11603 structure or union, and this has undefined behavior only
11604 on execution, so create a temporary if an lvalue is
11605 required. */
11606 else if (fallback == fb_lvalue)
11607 {
11608 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
11609 mark_addressable (*expr_p);
11610 ret = GS_OK;
11611 }
11612 else
11613 ret = GS_ALL_DONE;
11614 break;
11615
11616 /* The following are special cases that are not handled by the
11617 original GIMPLE grammar. */
11618
11619 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
11620 eliminated. */
11621 case SAVE_EXPR:
11622 ret = gimplify_save_expr (expr_p, pre_p, post_p);
11623 break;
11624
11625 case BIT_FIELD_REF:
11626 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11627 post_p, is_gimple_lvalue, fb_either);
11628 recalculate_side_effects (*expr_p);
11629 break;
11630
11631 case TARGET_MEM_REF:
11632 {
11633 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
11634
11635 if (TMR_BASE (*expr_p))
11636 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
11637 post_p, is_gimple_mem_ref_addr, fb_either);
11638 if (TMR_INDEX (*expr_p))
11639 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
11640 post_p, is_gimple_val, fb_rvalue);
11641 if (TMR_INDEX2 (*expr_p))
11642 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
11643 post_p, is_gimple_val, fb_rvalue);
11644 /* TMR_STEP and TMR_OFFSET are always integer constants. */
11645 ret = MIN (r0, r1);
11646 }
11647 break;
11648
11649 case NON_LVALUE_EXPR:
11650 /* This should have been stripped above. */
11651 gcc_unreachable ();
11652
11653 case ASM_EXPR:
11654 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
11655 break;
11656
11657 case TRY_FINALLY_EXPR:
11658 case TRY_CATCH_EXPR:
11659 {
11660 gimple_seq eval, cleanup;
11661 gtry *try_;
11662
11663 /* Calls to destructors are generated automatically in the FINALLY/CATCH
11664 block. They should have location UNKNOWN_LOCATION. However,
11665 gimplify_call_expr will reset such call stmts to input_location
11666 if it finds the stmt's location is unknown. To prevent that resetting
11667 for destructors, we set input_location to unknown.
11668 Note that this only affects the destructor calls in the FINALLY/CATCH
11669 block; input_location will automatically be reset to its original
11670 value by the end of gimplify_expr. */
11671 input_location = UNKNOWN_LOCATION;
11672 eval = cleanup = NULL;
11673 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
11674 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
11675 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
11676 if (gimple_seq_empty_p (cleanup))
11677 {
11678 gimple_seq_add_seq (pre_p, eval);
11679 ret = GS_ALL_DONE;
11680 break;
11681 }
11682 try_ = gimple_build_try (eval, cleanup,
11683 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
11684 ?
GIMPLE_TRY_FINALLY 11685 : GIMPLE_TRY_CATCH); 11686 if (EXPR_HAS_LOCATION (save_expr)) 11687 gimple_set_location (try_, EXPR_LOCATION (save_expr)); 11688 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION) 11689 gimple_set_location (try_, saved_location); 11690 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR) 11691 gimple_try_set_catch_is_cleanup (try_, 11692 TRY_CATCH_IS_CLEANUP (*expr_p)); 11693 gimplify_seq_add_stmt (pre_p, try_); 11694 ret = GS_ALL_DONE; 11695 break; 11696 } 11697 11698 case CLEANUP_POINT_EXPR: 11699 ret = gimplify_cleanup_point_expr (expr_p, pre_p); 11700 break; 11701 11702 case TARGET_EXPR: 11703 ret = gimplify_target_expr (expr_p, pre_p, post_p); 11704 break; 11705 11706 case CATCH_EXPR: 11707 { 11708 gimple *c; 11709 gimple_seq handler = NULL; 11710 gimplify_and_add (CATCH_BODY (*expr_p), &handler); 11711 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler); 11712 gimplify_seq_add_stmt (pre_p, c); 11713 ret = GS_ALL_DONE; 11714 break; 11715 } 11716 11717 case EH_FILTER_EXPR: 11718 { 11719 gimple *ehf; 11720 gimple_seq failure = NULL; 11721 11722 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure); 11723 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure); 11724 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p)); 11725 gimplify_seq_add_stmt (pre_p, ehf); 11726 ret = GS_ALL_DONE; 11727 break; 11728 } 11729 11730 case OBJ_TYPE_REF: 11731 { 11732 enum gimplify_status r0, r1; 11733 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p, 11734 post_p, is_gimple_val, fb_rvalue); 11735 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p, 11736 post_p, is_gimple_val, fb_rvalue); 11737 TREE_SIDE_EFFECTS (*expr_p) = 0; 11738 ret = MIN (r0, r1); 11739 } 11740 break; 11741 11742 case LABEL_DECL: 11743 /* We get here when taking the address of a label. We mark 11744 the label as "forced"; meaning it can never be removed and 11745 it is a potential target for any computed goto. */ 11746 FORCED_LABEL (*expr_p) = 1; 11747 ret = GS_ALL_DONE; 11748 break; 11749 11750 case STATEMENT_LIST: 11751 ret = gimplify_statement_list (expr_p, pre_p); 11752 break; 11753 11754 case WITH_SIZE_EXPR: 11755 { 11756 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, 11757 post_p == &internal_post ? NULL : post_p, 11758 gimple_test_f, fallback); 11759 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p, 11760 is_gimple_val, fb_rvalue); 11761 ret = GS_ALL_DONE; 11762 } 11763 break; 11764 11765 case VAR_DECL: 11766 case PARM_DECL: 11767 ret = gimplify_var_or_parm_decl (expr_p); 11768 break; 11769 11770 case RESULT_DECL: 11771 /* When within an OMP context, notice uses of variables. */ 11772 if (gimplify_omp_ctxp) 11773 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true); 11774 ret = GS_ALL_DONE; 11775 break; 11776 11777 case SSA_NAME: 11778 /* Allow callbacks into the gimplifier during optimization. 
*/ 11779 ret = GS_ALL_DONE; 11780 break; 11781 11782 case OMP_PARALLEL: 11783 gimplify_omp_parallel (expr_p, pre_p); 11784 ret = GS_ALL_DONE; 11785 break; 11786 11787 case OMP_TASK: 11788 gimplify_omp_task (expr_p, pre_p); 11789 ret = GS_ALL_DONE; 11790 break; 11791 11792 case OMP_FOR: 11793 case OMP_SIMD: 11794 case CILK_SIMD: 11795 case CILK_FOR: 11796 case OMP_DISTRIBUTE: 11797 case OMP_TASKLOOP: 11798 case OACC_LOOP: 11799 ret = gimplify_omp_for (expr_p, pre_p); 11800 break; 11801 11802 case OACC_CACHE: 11803 gimplify_oacc_cache (expr_p, pre_p); 11804 ret = GS_ALL_DONE; 11805 break; 11806 11807 case OACC_DECLARE: 11808 gimplify_oacc_declare (expr_p, pre_p); 11809 ret = GS_ALL_DONE; 11810 break; 11811 11812 case OACC_HOST_DATA: 11813 case OACC_DATA: 11814 case OACC_KERNELS: 11815 case OACC_PARALLEL: 11816 case OMP_SECTIONS: 11817 case OMP_SINGLE: 11818 case OMP_TARGET: 11819 case OMP_TARGET_DATA: 11820 case OMP_TEAMS: 11821 gimplify_omp_workshare (expr_p, pre_p); 11822 ret = GS_ALL_DONE; 11823 break; 11824 11825 case OACC_ENTER_DATA: 11826 case OACC_EXIT_DATA: 11827 case OACC_UPDATE: 11828 case OMP_TARGET_UPDATE: 11829 case OMP_TARGET_ENTER_DATA: 11830 case OMP_TARGET_EXIT_DATA: 11831 gimplify_omp_target_update (expr_p, pre_p); 11832 ret = GS_ALL_DONE; 11833 break; 11834 11835 case OMP_SECTION: 11836 case OMP_MASTER: 11837 case OMP_TASKGROUP: 11838 case OMP_ORDERED: 11839 case OMP_CRITICAL: 11840 { 11841 gimple_seq body = NULL; 11842 gimple *g; 11843 11844 gimplify_and_add (OMP_BODY (*expr_p), &body); 11845 switch (TREE_CODE (*expr_p)) 11846 { 11847 case OMP_SECTION: 11848 g = gimple_build_omp_section (body); 11849 break; 11850 case OMP_MASTER: 11851 g = gimple_build_omp_master (body); 11852 break; 11853 case OMP_TASKGROUP: 11854 { 11855 gimple_seq cleanup = NULL; 11856 tree fn 11857 = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END); 11858 g = gimple_build_call (fn, 0); 11859 gimple_seq_add_stmt (&cleanup, g); 11860 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY); 11861 body = NULL; 11862 gimple_seq_add_stmt (&body, g); 11863 g = gimple_build_omp_taskgroup (body); 11864 } 11865 break; 11866 case OMP_ORDERED: 11867 g = gimplify_omp_ordered (*expr_p, body); 11868 break; 11869 case OMP_CRITICAL: 11870 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p), 11871 pre_p, ORT_WORKSHARE, OMP_CRITICAL); 11872 gimplify_adjust_omp_clauses (pre_p, body, 11873 &OMP_CRITICAL_CLAUSES (*expr_p), 11874 OMP_CRITICAL); 11875 g = gimple_build_omp_critical (body, 11876 OMP_CRITICAL_NAME (*expr_p), 11877 OMP_CRITICAL_CLAUSES (*expr_p)); 11878 break; 11879 default: 11880 gcc_unreachable (); 11881 } 11882 gimplify_seq_add_stmt (pre_p, g); 11883 ret = GS_ALL_DONE; 11884 break; 11885 } 11886 11887 case OMP_ATOMIC: 11888 case OMP_ATOMIC_READ: 11889 case OMP_ATOMIC_CAPTURE_OLD: 11890 case OMP_ATOMIC_CAPTURE_NEW: 11891 ret = gimplify_omp_atomic (expr_p, pre_p); 11892 break; 11893 11894 case TRANSACTION_EXPR: 11895 ret = gimplify_transaction (expr_p, pre_p); 11896 break; 11897 11898 case TRUTH_AND_EXPR: 11899 case TRUTH_OR_EXPR: 11900 case TRUTH_XOR_EXPR: 11901 { 11902 tree orig_type = TREE_TYPE (*expr_p); 11903 tree new_type, xop0, xop1; 11904 *expr_p = gimple_boolify (*expr_p); 11905 new_type = TREE_TYPE (*expr_p); 11906 if (!useless_type_conversion_p (orig_type, new_type)) 11907 { 11908 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p); 11909 ret = GS_OK; 11910 break; 11911 } 11912 11913 /* Boolified binary truth expressions are semantically equivalent 11914 to bitwise binary 
expressions. Canonicalize them to the 11915 bitwise variant. */ 11916 switch (TREE_CODE (*expr_p)) 11917 { 11918 case TRUTH_AND_EXPR: 11919 TREE_SET_CODE (*expr_p, BIT_AND_EXPR); 11920 break; 11921 case TRUTH_OR_EXPR: 11922 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR); 11923 break; 11924 case TRUTH_XOR_EXPR: 11925 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR); 11926 break; 11927 default: 11928 break; 11929 } 11930 /* Now make sure that operands have compatible type to 11931 expression's new_type. */ 11932 xop0 = TREE_OPERAND (*expr_p, 0); 11933 xop1 = TREE_OPERAND (*expr_p, 1); 11934 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0))) 11935 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location, 11936 new_type, 11937 xop0); 11938 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1))) 11939 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location, 11940 new_type, 11941 xop1); 11942 /* Continue classified as tcc_binary. */ 11943 goto expr_2; 11944 } 11945 11946 case VEC_COND_EXPR: 11947 { 11948 enum gimplify_status r0, r1, r2; 11949 11950 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, 11951 post_p, is_gimple_condexpr, fb_rvalue); 11952 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, 11953 post_p, is_gimple_val, fb_rvalue); 11954 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p, 11955 post_p, is_gimple_val, fb_rvalue); 11956 11957 ret = MIN (MIN (r0, r1), r2); 11958 recalculate_side_effects (*expr_p); 11959 } 11960 break; 11961 11962 case FMA_EXPR: 11963 case VEC_PERM_EXPR: 11964 /* Classified as tcc_expression. */ 11965 goto expr_3; 11966 11967 case BIT_INSERT_EXPR: 11968 /* Argument 3 is a constant. */ 11969 goto expr_2; 11970 11971 case POINTER_PLUS_EXPR: 11972 { 11973 enum gimplify_status r0, r1; 11974 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, 11975 post_p, is_gimple_val, fb_rvalue); 11976 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, 11977 post_p, is_gimple_val, fb_rvalue); 11978 recalculate_side_effects (*expr_p); 11979 ret = MIN (r0, r1); 11980 break; 11981 } 11982 11983 case CILK_SYNC_STMT: 11984 { 11985 if (!fn_contains_cilk_spawn_p (cfun)) 11986 { 11987 error_at (EXPR_LOCATION (*expr_p), 11988 "expected %<_Cilk_spawn%> before %<_Cilk_sync%>"); 11989 ret = GS_ERROR; 11990 } 11991 else 11992 { 11993 gimplify_cilk_sync (expr_p, pre_p); 11994 ret = GS_ALL_DONE; 11995 } 11996 break; 11997 } 11998 11999 default: 12000 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p))) 12001 { 12002 case tcc_comparison: 12003 /* Handle comparison of objects of non scalar mode aggregates 12004 with a call to memcmp. It would be nice to only have to do 12005 this for variable-sized objects, but then we'd have to allow 12006 the same nest of reference nodes we allow for MODIFY_EXPR and 12007 that's too complex. 12008 12009 Compare scalar mode aggregates as scalar mode values. Using 12010 memcmp for them would be very inefficient at best, and is 12011 plain wrong if bitfields are involved. */ 12012 { 12013 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1)); 12014 12015 /* Vector comparisons need no boolification. 
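As a sketch of the paths below: a comparison of two vectors is gimplified
as a plain binary expression; an aggregate whose mode is scalar (say, a
small struct that fits in a word) goes through
gimplify_scalar_mode_aggregate_compare; and BLKmode aggregates are compared
via a call to memcmp (aggregate equality comes from frontends that permit
it, not from C).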
*/ 12016 if (TREE_CODE (type) == VECTOR_TYPE) 12017 goto expr_2; 12018 else if (!AGGREGATE_TYPE_P (type)) 12019 { 12020 tree org_type = TREE_TYPE (*expr_p); 12021 *expr_p = gimple_boolify (*expr_p); 12022 if (!useless_type_conversion_p (org_type, 12023 TREE_TYPE (*expr_p))) 12024 { 12025 *expr_p = fold_convert_loc (input_location, 12026 org_type, *expr_p); 12027 ret = GS_OK; 12028 } 12029 else 12030 goto expr_2; 12031 } 12032 else if (TYPE_MODE (type) != BLKmode) 12033 ret = gimplify_scalar_mode_aggregate_compare (expr_p); 12034 else 12035 ret = gimplify_variable_sized_compare (expr_p); 12036 12037 break; 12038 } 12039 12040 /* If *EXPR_P does not need to be special-cased, handle it 12041 according to its class. */ 12042 case tcc_unary: 12043 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, 12044 post_p, is_gimple_val, fb_rvalue); 12045 break; 12046 12047 case tcc_binary: 12048 expr_2: 12049 { 12050 enum gimplify_status r0, r1; 12051 12052 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, 12053 post_p, is_gimple_val, fb_rvalue); 12054 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, 12055 post_p, is_gimple_val, fb_rvalue); 12056 12057 ret = MIN (r0, r1); 12058 break; 12059 } 12060 12061 expr_3: 12062 { 12063 enum gimplify_status r0, r1, r2; 12064 12065 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, 12066 post_p, is_gimple_val, fb_rvalue); 12067 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, 12068 post_p, is_gimple_val, fb_rvalue); 12069 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p, 12070 post_p, is_gimple_val, fb_rvalue); 12071 12072 ret = MIN (MIN (r0, r1), r2); 12073 break; 12074 } 12075 12076 case tcc_declaration: 12077 case tcc_constant: 12078 ret = GS_ALL_DONE; 12079 goto dont_recalculate; 12080 12081 default: 12082 gcc_unreachable (); 12083 } 12084 12085 recalculate_side_effects (*expr_p); 12086 12087 dont_recalculate: 12088 break; 12089 } 12090 12091 gcc_assert (*expr_p || ret != GS_OK); 12092 } 12093 while (ret == GS_OK); 12094 12095 /* If we encountered an error_mark somewhere nested inside, either 12096 stub out the statement or propagate the error back out. */ 12097 if (ret == GS_ERROR) 12098 { 12099 if (is_statement) 12100 *expr_p = NULL; 12101 goto out; 12102 } 12103 12104 /* This was only valid as a return value from the langhook, which 12105 we handled. Make sure it doesn't escape from any other context. */ 12106 gcc_assert (ret != GS_UNHANDLED); 12107 12108 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p)) 12109 { 12110 /* We aren't looking for a value, and we don't have a valid 12111 statement. If it doesn't have side-effects, throw it away. 12112 We can also get here with code such as "*&&L;", where L is 12113 a LABEL_DECL that is marked as FORCED_LABEL. */ 12114 if (TREE_CODE (*expr_p) == LABEL_DECL 12115 || !TREE_SIDE_EFFECTS (*expr_p)) 12116 *expr_p = NULL; 12117 else if (!TREE_THIS_VOLATILE (*expr_p)) 12118 { 12119 /* This is probably a _REF that contains something nested that 12120 has side effects. Recurse through the operands to find it. 
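E.g. for a statement like 's.a[f ()];' the value of the reference is
unused, but the call to f still has to be emitted; gimplifying the
ARRAY_REF operands below takes care of that (f is a hypothetical function
with side effects).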
*/ 12121 enum tree_code code = TREE_CODE (*expr_p); 12122 12123 switch (code) 12124 { 12125 case COMPONENT_REF: 12126 case REALPART_EXPR: 12127 case IMAGPART_EXPR: 12128 case VIEW_CONVERT_EXPR: 12129 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p, 12130 gimple_test_f, fallback); 12131 break; 12132 12133 case ARRAY_REF: 12134 case ARRAY_RANGE_REF: 12135 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p, 12136 gimple_test_f, fallback); 12137 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p, 12138 gimple_test_f, fallback); 12139 break; 12140 12141 default: 12142 /* Anything else with side-effects must be converted to 12143 a valid statement before we get here. */ 12144 gcc_unreachable (); 12145 } 12146 12147 *expr_p = NULL; 12148 } 12149 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p)) 12150 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode) 12151 { 12152 /* Historically, the compiler has treated a bare reference 12153 to a non-BLKmode volatile lvalue as forcing a load. */ 12154 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p)); 12155 12156 /* Normally, we do not want to create a temporary for a 12157 TREE_ADDRESSABLE type because such a type should not be 12158 copied by bitwise-assignment. However, we make an 12159 exception here, as all we are doing here is ensuring that 12160 we read the bytes that make up the type. We use 12161 create_tmp_var_raw because create_tmp_var will abort when 12162 given a TREE_ADDRESSABLE type. */ 12163 tree tmp = create_tmp_var_raw (type, "vol"); 12164 gimple_add_tmp_var (tmp); 12165 gimplify_assign (tmp, *expr_p, pre_p); 12166 *expr_p = NULL; 12167 } 12168 else 12169 /* We can't do anything useful with a volatile reference to 12170 an incomplete type, so just throw it away. Likewise for 12171 a BLKmode type, since any implicit inner load should 12172 already have been turned into an explicit one by the 12173 gimplification process. */ 12174 *expr_p = NULL; 12175 } 12176 12177 /* If we are gimplifying at the statement level, we're done. Tack 12178 everything together and return. */ 12179 if (fallback == fb_none || is_statement) 12180 { 12181 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear 12182 it out for GC to reclaim it. */ 12183 *expr_p = NULL_TREE; 12184 12185 if (!gimple_seq_empty_p (internal_pre) 12186 || !gimple_seq_empty_p (internal_post)) 12187 { 12188 gimplify_seq_add_seq (&internal_pre, internal_post); 12189 gimplify_seq_add_seq (pre_p, internal_pre); 12190 } 12191 12192 /* The result of gimplifying *EXPR_P is going to be the last few 12193 statements in *PRE_P and *POST_P. Add location information 12194 to all the statements that were added by the gimplification 12195 helpers. */ 12196 if (!gimple_seq_empty_p (*pre_p)) 12197 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location); 12198 12199 if (!gimple_seq_empty_p (*post_p)) 12200 annotate_all_with_location_after (*post_p, post_last_gsi, 12201 input_location); 12202 12203 goto out; 12204 } 12205 12206 #ifdef ENABLE_GIMPLE_CHECKING 12207 if (*expr_p) 12208 { 12209 enum tree_code code = TREE_CODE (*expr_p); 12210 /* These expressions should already be in gimple IR form. 
*/ 12211 gcc_assert (code != MODIFY_EXPR 12212 && code != ASM_EXPR 12213 && code != BIND_EXPR 12214 && code != CATCH_EXPR 12215 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr) 12216 && code != EH_FILTER_EXPR 12217 && code != GOTO_EXPR 12218 && code != LABEL_EXPR 12219 && code != LOOP_EXPR 12220 && code != SWITCH_EXPR 12221 && code != TRY_FINALLY_EXPR 12222 && code != OACC_PARALLEL 12223 && code != OACC_KERNELS 12224 && code != OACC_DATA 12225 && code != OACC_HOST_DATA 12226 && code != OACC_DECLARE 12227 && code != OACC_UPDATE 12228 && code != OACC_ENTER_DATA 12229 && code != OACC_EXIT_DATA 12230 && code != OACC_CACHE 12231 && code != OMP_CRITICAL 12232 && code != OMP_FOR 12233 && code != OACC_LOOP 12234 && code != OMP_MASTER 12235 && code != OMP_TASKGROUP 12236 && code != OMP_ORDERED 12237 && code != OMP_PARALLEL 12238 && code != OMP_SECTIONS 12239 && code != OMP_SECTION 12240 && code != OMP_SINGLE); 12241 } 12242 #endif 12243 12244 /* Otherwise we're gimplifying a subexpression, so the resulting 12245 value is interesting. If it's a valid operand that matches 12246 GIMPLE_TEST_F, we're done. Unless we are handling some 12247 post-effects internally; if that's the case, we need to copy into 12248 a temporary before adding the post-effects to POST_P. */ 12249 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p)) 12250 goto out; 12251 12252 /* Otherwise, we need to create a new temporary for the gimplified 12253 expression. */ 12254 12255 /* We can't return an lvalue if we have an internal postqueue. The 12256 object the lvalue refers to would (probably) be modified by the 12257 postqueue; we need to copy the value out first, which means an 12258 rvalue. */ 12259 if ((fallback & fb_lvalue) 12260 && gimple_seq_empty_p (internal_post) 12261 && is_gimple_addressable (*expr_p)) 12262 { 12263 /* An lvalue will do. Take the address of the expression, store it 12264 in a temporary, and replace the expression with an INDIRECT_REF of 12265 that temporary. */ 12266 tmp = build_fold_addr_expr_loc (input_location, *expr_p); 12267 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue); 12268 *expr_p = build_simple_mem_ref (tmp); 12269 } 12270 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p)) 12271 { 12272 /* An rvalue will do. Assign the gimplified expression into a 12273 new temporary TMP and replace the original expression with 12274 TMP. First, make sure that the expression has a type so that 12275 it can be assigned into a temporary. */ 12276 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p))); 12277 *expr_p = get_formal_tmp_var (*expr_p, pre_p); 12278 } 12279 else 12280 { 12281 #ifdef ENABLE_GIMPLE_CHECKING 12282 if (!(fallback & fb_mayfail)) 12283 { 12284 fprintf (stderr, "gimplification failed:\n"); 12285 print_generic_expr (stderr, *expr_p, 0); 12286 debug_tree (*expr_p); 12287 internal_error ("gimplification failed"); 12288 } 12289 #endif 12290 gcc_assert (fallback & fb_mayfail); 12291 12292 /* If this is an asm statement, and the user asked for the 12293 impossible, don't die. Fail and let gimplify_asm_expr 12294 issue an error. */ 12295 ret = GS_ERROR; 12296 goto out; 12297 } 12298 12299 /* Make sure the temporary matches our predicate. 
*/
12300 gcc_assert ((*gimple_test_f) (*expr_p));
12301
12302 if (!gimple_seq_empty_p (internal_post))
12303 {
12304 annotate_all_with_location (internal_post, input_location);
12305 gimplify_seq_add_seq (pre_p, internal_post);
12306 }
12307
12308 out:
12309 input_location = saved_location;
12310 return ret;
12311 }
12312
12313 /* Like gimplify_expr but make sure the gimplified result is not itself
12314 an SSA name (but a decl if it were). Temporaries required by
12315 evaluating *EXPR_P may still be SSA names. */
12316
12317 static enum gimplify_status
12318 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
12319 bool (*gimple_test_f) (tree), fallback_t fallback,
12320 bool allow_ssa)
12321 {
12322 bool was_ssa_name_p = TREE_CODE (*expr_p) == SSA_NAME;
12323 enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
12324 gimple_test_f, fallback);
12325 if (! allow_ssa
12326 && TREE_CODE (*expr_p) == SSA_NAME)
12327 {
12328 tree name = *expr_p;
12329 if (was_ssa_name_p)
12330 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
12331 else
12332 {
12333 /* Avoid the extra copy if possible. */
12334 *expr_p = create_tmp_reg (TREE_TYPE (name));
12335 gimple_set_lhs (SSA_NAME_DEF_STMT (name), *expr_p);
12336 release_ssa_name (name);
12337 }
12338 }
12339 return ret;
12340 }
12341
12342 /* Look through TYPE for variable-sized objects and gimplify each such
12343 size that we find. Add to LIST_P any statements generated. */
12344
12345 void
12346 gimplify_type_sizes (tree type, gimple_seq *list_p)
12347 {
12348 tree field, t;
12349
12350 if (type == NULL || type == error_mark_node)
12351 return;
12352
12353 /* We first do the main variant, then copy into any other variants. */
12354 type = TYPE_MAIN_VARIANT (type);
12355
12356 /* Avoid infinite recursion. */
12357 if (TYPE_SIZES_GIMPLIFIED (type))
12358 return;
12359
12360 TYPE_SIZES_GIMPLIFIED (type) = 1;
12361
12362 switch (TREE_CODE (type))
12363 {
12364 case INTEGER_TYPE:
12365 case ENUMERAL_TYPE:
12366 case BOOLEAN_TYPE:
12367 case REAL_TYPE:
12368 case FIXED_POINT_TYPE:
12369 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
12370 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
12371
12372 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
12373 {
12374 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
12375 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
12376 }
12377 break;
12378
12379 case ARRAY_TYPE:
12380 /* These types may not have declarations, so handle them here. */
12381 gimplify_type_sizes (TREE_TYPE (type), list_p);
12382 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
12383 /* Ensure VLA bounds aren't removed; for -O0 they should be variables
12384 with assigned stack slots, and for -O1+ -g they should be tracked
12385 by VTA.
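For example (a sketch), in

void f (int n) { char a[n]; ... }

the upper bound of a's domain is roughly the artificial variable
D.1234 = n - 1, where D.1234 stands for the compiler-generated temporary;
clearing DECL_IGNORED_P on it keeps it visible to the debugger.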
*/
12386 if (!(TYPE_NAME (type)
12387 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12388 && DECL_IGNORED_P (TYPE_NAME (type)))
12389 && TYPE_DOMAIN (type)
12390 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
12391 {
12392 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
12393 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
12394 DECL_IGNORED_P (t) = 0;
12395 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
12396 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
12397 DECL_IGNORED_P (t) = 0;
12398 }
12399 break;
12400
12401 case RECORD_TYPE:
12402 case UNION_TYPE:
12403 case QUAL_UNION_TYPE:
12404 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
12405 if (TREE_CODE (field) == FIELD_DECL)
12406 {
12407 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
12408 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
12409 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
12410 gimplify_type_sizes (TREE_TYPE (field), list_p);
12411 }
12412 break;
12413
12414 case POINTER_TYPE:
12415 case REFERENCE_TYPE:
12416 /* We used to recurse on the pointed-to type here, which turned out to
12417 be incorrect because its definition might refer to variables not
12418 yet initialized at this point if a forward declaration is involved.
12419
12420 It was actually useful for anonymous pointed-to types to ensure
12421 that the sizes evaluation dominates every possible later use of the
12422 values. Restricting to such types here would be safe since there
12423 is no possible forward declaration around, but would introduce an
12424 undesirable middle-end semantic to anonymity. We then defer to
12425 front-ends the responsibility of ensuring that the sizes are
12426 evaluated both early and late enough, e.g. by attaching artificial
12427 type declarations to the tree. */
12428 break;
12429
12430 default:
12431 break;
12432 }
12433
12434 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
12435 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
12436
12437 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
12438 {
12439 TYPE_SIZE (t) = TYPE_SIZE (type);
12440 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
12441 TYPE_SIZES_GIMPLIFIED (t) = 1;
12442 }
12443 }
12444
12445 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
12446 a size or position, has had all of its SAVE_EXPRs evaluated.
12447 We add any required statements to *STMT_P. */
12448
12449 void
12450 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
12451 {
12452 tree expr = *expr_p;
12453
12454 /* We don't do anything if the value isn't there, is constant, or contains
12455 a PLACEHOLDER_EXPR. We also don't want to do anything if it's already
12456 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
12457 will want to replace it with a new variable, but that will cause problems
12458 if this type is from outside the function. It's OK to have that here. */
12459 if (is_gimple_sizepos (expr))
12460 return;
12461
12462 *expr_p = unshare_expr (expr);
12463
12464 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
12465 if the def vanishes. */
12466 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
12467 }
12468
12469 /* Gimplify the body of FNDECL and return a GIMPLE_BIND node containing
12470 the sequence of corresponding GIMPLE statements. If DO_PARMS
12471 is true, also gimplify the parameters.
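The result is always a single GIMPLE_BIND: e.g. an empty function body
is turned into a GIMPLE_BIND containing just a GIMPLE_NOP.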
*/ 12472 12473 gbind * 12474 gimplify_body (tree fndecl, bool do_parms) 12475 { 12476 location_t saved_location = input_location; 12477 gimple_seq parm_stmts, seq; 12478 gimple *outer_stmt; 12479 gbind *outer_bind; 12480 struct cgraph_node *cgn; 12481 12482 timevar_push (TV_TREE_GIMPLIFY); 12483 12484 init_tree_ssa (cfun); 12485 12486 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during 12487 gimplification. */ 12488 default_rtl_profile (); 12489 12490 gcc_assert (gimplify_ctxp == NULL); 12491 push_gimplify_context (true); 12492 12493 if (flag_openacc || flag_openmp) 12494 { 12495 gcc_assert (gimplify_omp_ctxp == NULL); 12496 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl))) 12497 gimplify_omp_ctxp = new_omp_context (ORT_TARGET); 12498 } 12499 12500 /* Unshare most shared trees in the body and in that of any nested functions. 12501 It would seem we don't have to do this for nested functions because 12502 they are supposed to be output and then the outer function gimplified 12503 first, but the g++ front end doesn't always do it that way. */ 12504 unshare_body (fndecl); 12505 unvisit_body (fndecl); 12506 12507 cgn = cgraph_node::get (fndecl); 12508 if (cgn && cgn->origin) 12509 nonlocal_vlas = new hash_set<tree>; 12510 12511 /* Make sure input_location isn't set to something weird. */ 12512 input_location = DECL_SOURCE_LOCATION (fndecl); 12513 12514 /* Resolve callee-copies. This has to be done before processing 12515 the body so that DECL_VALUE_EXPR gets processed correctly. */ 12516 parm_stmts = do_parms ? gimplify_parameters () : NULL; 12517 12518 /* Gimplify the function's body. */ 12519 seq = NULL; 12520 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq); 12521 outer_stmt = gimple_seq_first_stmt (seq); 12522 if (!outer_stmt) 12523 { 12524 outer_stmt = gimple_build_nop (); 12525 gimplify_seq_add_stmt (&seq, outer_stmt); 12526 } 12527 12528 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is 12529 not the case, wrap everything in a GIMPLE_BIND to make it so. */ 12530 if (gimple_code (outer_stmt) == GIMPLE_BIND 12531 && gimple_seq_first (seq) == gimple_seq_last (seq)) 12532 outer_bind = as_a <gbind *> (outer_stmt); 12533 else 12534 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL); 12535 12536 DECL_SAVED_TREE (fndecl) = NULL_TREE; 12537 12538 /* If we had callee-copies statements, insert them at the beginning 12539 of the function and clear DECL_VALUE_EXPR_P on the parameters. */ 12540 if (!gimple_seq_empty_p (parm_stmts)) 12541 { 12542 tree parm; 12543 12544 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind)); 12545 gimple_bind_set_body (outer_bind, parm_stmts); 12546 12547 for (parm = DECL_ARGUMENTS (current_function_decl); 12548 parm; parm = DECL_CHAIN (parm)) 12549 if (DECL_HAS_VALUE_EXPR_P (parm)) 12550 { 12551 DECL_HAS_VALUE_EXPR_P (parm) = 0; 12552 DECL_IGNORED_P (parm) = 0; 12553 } 12554 } 12555 12556 if (nonlocal_vlas) 12557 { 12558 if (nonlocal_vla_vars) 12559 { 12560 /* tree-nested.c may later on call declare_vars (..., true); 12561 which relies on BLOCK_VARS chain to be the tail of the 12562 gimple_bind_vars chain. Ensure we don't violate that 12563 assumption. 
*/
12564 if (gimple_bind_block (outer_bind)
12565 == DECL_INITIAL (current_function_decl))
12566 declare_vars (nonlocal_vla_vars, outer_bind, true);
12567 else
12568 BLOCK_VARS (DECL_INITIAL (current_function_decl))
12569 = chainon (BLOCK_VARS (DECL_INITIAL (current_function_decl)),
12570 nonlocal_vla_vars);
12571 nonlocal_vla_vars = NULL_TREE;
12572 }
12573 delete nonlocal_vlas;
12574 nonlocal_vlas = NULL;
12575 }
12576
12577 if ((flag_openacc || flag_openmp || flag_openmp_simd)
12578 && gimplify_omp_ctxp)
12579 {
12580 delete_omp_context (gimplify_omp_ctxp);
12581 gimplify_omp_ctxp = NULL;
12582 }
12583
12584 pop_gimplify_context (outer_bind);
12585 gcc_assert (gimplify_ctxp == NULL);
12586
12587 if (flag_checking && !seen_error ())
12588 verify_gimple_in_seq (gimple_bind_body (outer_bind));
12589
12590 timevar_pop (TV_TREE_GIMPLIFY);
12591 input_location = saved_location;
12592
12593 return outer_bind;
12594 }
12595
12596 typedef char *char_p; /* For DEF_VEC_P. */
12597
12598 /* Return whether we should exclude FNDECL from instrumentation. */
12599
12600 static bool
12601 flag_instrument_functions_exclude_p (tree fndecl)
12602 {
12603 vec<char_p> *v;
12604
12605 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
12606 if (v && v->length () > 0)
12607 {
12608 const char *name;
12609 int i;
12610 char *s;
12611
12612 name = lang_hooks.decl_printable_name (fndecl, 0);
12613 FOR_EACH_VEC_ELT (*v, i, s)
12614 if (strstr (name, s) != NULL)
12615 return true;
12616 }
12617
12618 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
12619 if (v && v->length () > 0)
12620 {
12621 const char *name;
12622 int i;
12623 char *s;
12624
12625 name = DECL_SOURCE_FILE (fndecl);
12626 FOR_EACH_VEC_ELT (*v, i, s)
12627 if (strstr (name, s) != NULL)
12628 return true;
12629 }
12630
12631 return false;
12632 }
12633
12634 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
12635 node for the function we want to gimplify.
12636
12637 The sequence of GIMPLE statements corresponding to the body of FNDECL
12638 is installed as its body via gimple_set_body. */
12639
12640 void
12641 gimplify_function_tree (tree fndecl)
12642 {
12643 tree parm, ret;
12644 gimple_seq seq;
12645 gbind *bind;
12646
12647 gcc_assert (!gimple_body (fndecl));
12648
12649 if (DECL_STRUCT_FUNCTION (fndecl))
12650 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
12651 else
12652 push_struct_function (fndecl);
12653
12654 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
12655 if necessary. */
12656 cfun->curr_properties |= PROP_gimple_lva;
12657
12658 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
12659 {
12660 /* Preliminarily mark non-addressed complex variables as eligible
12661 for promotion to gimple registers. We'll transform their uses
12662 as we find them.
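E.g. a parameter declared '_Complex double z' whose address is never
taken gets DECL_GIMPLE_REG_P set here, so later passes may split it into
separate real and imaginary SSA registers.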
*/ 12663 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE 12664 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE) 12665 && !TREE_THIS_VOLATILE (parm) 12666 && !needs_to_live_in_memory (parm)) 12667 DECL_GIMPLE_REG_P (parm) = 1; 12668 } 12669 12670 ret = DECL_RESULT (fndecl); 12671 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE 12672 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE) 12673 && !needs_to_live_in_memory (ret)) 12674 DECL_GIMPLE_REG_P (ret) = 1; 12675 12676 if (asan_sanitize_use_after_scope () && !asan_no_sanitize_address_p ()) 12677 asan_poisoned_variables = new hash_set<tree> (); 12678 bind = gimplify_body (fndecl, true); 12679 if (asan_poisoned_variables) 12680 { 12681 delete asan_poisoned_variables; 12682 asan_poisoned_variables = NULL; 12683 } 12684 12685 /* The tree body of the function is no longer needed, replace it 12686 with the new GIMPLE body. */ 12687 seq = NULL; 12688 gimple_seq_add_stmt (&seq, bind); 12689 gimple_set_body (fndecl, seq); 12690 12691 /* If we're instrumenting function entry/exit, then prepend the call to 12692 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to 12693 catch the exit hook. */ 12694 /* ??? Add some way to ignore exceptions for this TFE. */ 12695 if (flag_instrument_function_entry_exit 12696 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl) 12697 /* Do not instrument extern inline functions. */ 12698 && !(DECL_DECLARED_INLINE_P (fndecl) 12699 && DECL_EXTERNAL (fndecl) 12700 && DECL_DISREGARD_INLINE_LIMITS (fndecl)) 12701 && !flag_instrument_functions_exclude_p (fndecl)) 12702 { 12703 tree x; 12704 gbind *new_bind; 12705 gimple *tf; 12706 gimple_seq cleanup = NULL, body = NULL; 12707 tree tmp_var; 12708 gcall *call; 12709 12710 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS); 12711 call = gimple_build_call (x, 1, integer_zero_node); 12712 tmp_var = create_tmp_var (ptr_type_node, "return_addr"); 12713 gimple_call_set_lhs (call, tmp_var); 12714 gimplify_seq_add_stmt (&cleanup, call); 12715 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT); 12716 call = gimple_build_call (x, 2, 12717 build_fold_addr_expr (current_function_decl), 12718 tmp_var); 12719 gimplify_seq_add_stmt (&cleanup, call); 12720 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY); 12721 12722 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS); 12723 call = gimple_build_call (x, 1, integer_zero_node); 12724 tmp_var = create_tmp_var (ptr_type_node, "return_addr"); 12725 gimple_call_set_lhs (call, tmp_var); 12726 gimplify_seq_add_stmt (&body, call); 12727 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER); 12728 call = gimple_build_call (x, 2, 12729 build_fold_addr_expr (current_function_decl), 12730 tmp_var); 12731 gimplify_seq_add_stmt (&body, call); 12732 gimplify_seq_add_stmt (&body, tf); 12733 new_bind = gimple_build_bind (NULL, body, NULL); 12734 12735 /* Replace the current function body with the body 12736 wrapped in the try/finally TF. */ 12737 seq = NULL; 12738 gimple_seq_add_stmt (&seq, new_bind); 12739 gimple_set_body (fndecl, seq); 12740 bind = new_bind; 12741 } 12742 12743 if ((flag_sanitize & SANITIZE_THREAD) != 0 12744 && !lookup_attribute ("no_sanitize_thread", DECL_ATTRIBUTES (fndecl))) 12745 { 12746 gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0); 12747 gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY); 12748 gbind *new_bind = gimple_build_bind (NULL, tf, NULL); 12749 /* Replace the current function body with the body 12750 wrapped in the try/finally TF. 
  if ((flag_sanitize & SANITIZE_THREAD) != 0
      && !lookup_attribute ("no_sanitize_thread", DECL_ATTRIBUTES (fndecl)))
    {
      gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
      gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
      gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
      /* Replace the current function body with the body
	 wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
    }

  DECL_SAVED_TREE (fndecl) = NULL_TREE;
  cfun->curr_properties |= PROP_gimple_any;

  pop_cfun ();

  dump_function (TDI_generic, fndecl);
}

/* Return a dummy expression of type TYPE in order to keep going after an
   error.  */

static tree
dummy_object (tree type)
{
  tree t = build_int_cst (build_pointer_type (type), 0);
  return build2 (MEM_REF, type, t, t);
}

/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
		      gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t, tag, aptag;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);
  if (have_va_type == NULL_TREE
      && POINTER_TYPE_P (TREE_TYPE (valist)))
    /* Handle 'Case 1: Not an array type' from c-common.c/build_va_arg.  */
    have_va_type
      = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
  gcc_assert (have_va_type != NULL_TREE);

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
      != type)
    {
      static bool gave_help;
      bool warned;
      /* Use the expansion point to handle cases such as passing bool (defined
	 in a system header) through `...'.  */
      source_location xloc
	= expansion_point_location_if_in_system_header (loc);

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      warned = warning_at (xloc, 0,
			   "%qT is promoted to %qT when passed through %<...%>",
			   type, promoted_type);
      if (!gave_help && warned)
	{
	  gave_help = true;
	  inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      if (warned)
	inform (xloc, "if this code is reached, the program will abort");
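      /* As an illustrative example: for 'va_arg (ap, char)' the calls
	 above produce, on the first occurrence,

	   warning: 'char' is promoted to 'int' when passed through '...'
	   note: (so you should pass 'int' not 'char' to 'va_arg')
	   note: if this code is reached, the program will abort

	 and the code below then emits a trap instead of reading the
	 argument.  */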
      /* Before the abort, allow the evaluation of the va_list
	 expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
			       builtin_decl_implicit (BUILT_IN_TRAP), 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }

  tag = build_int_cst (build_pointer_type (type), 0);
  aptag = build_int_cst (TREE_TYPE (valist), 0);

  *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
					  valist, tag, aptag);

  /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
     needs to be expanded.  */
  cfun->curr_properties &= ~PROP_gimple_lva;

  return GS_OK;
}

/* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.

   DST/SRC are the destination and source respectively.  You can pass
   ungimplified trees in DST or SRC, in which case they will be
   converted to a gimple operand if necessary.

   This function returns the newly created GIMPLE_ASSIGN tuple.  */

gimple *
gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
{
  tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
  gimplify_and_add (t, seq_p);
  ggc_free (t);
  return gimple_seq_last_stmt (*seq_p);
}

inline hashval_t
gimplify_hasher::hash (const elt_t *p)
{
  tree t = p->val;
  return iterative_hash_expr (t, 0);
}

inline bool
gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
{
  tree t1 = p1->val;
  tree t2 = p2->val;
  enum tree_code code = TREE_CODE (t1);

  if (TREE_CODE (t2) != code
      || TREE_TYPE (t1) != TREE_TYPE (t2))
    return false;

  if (!operand_equal_p (t1, t2, 0))
    return false;

  /* Only allow them to compare equal if they also hash equal; otherwise
     results are nondeterministic, and we fail bootstrap comparison.  */
  gcc_checking_assert (hash (p1) == hash (p2));

  return true;
}
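/* An illustrative (hypothetical) use of gimplify_assign, assuming DST
   and SRC are previously built trees of compatible type:

     gimple_seq seq = NULL;
     gimple *assign = gimplify_assign (dst, src, &seq);

   SEQ then contains whatever statements were needed to gimplify SRC and
   DST, followed by the new assignment; ASSIGN points at that final
   GIMPLE_ASSIGN statement.  */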