/* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
   tree representation into the GIMPLE form.
   Copyright (C) 2002-2019 Free Software Foundation, Inc.
   Major work done by Sebastian Pop <s.pop@laposte.net>,
   Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "tm_p.h"
#include "gimple.h"
#include "gimple-predict.h"
#include "tree-pass.h"		/* FIXME: only for PROP_gimple_any */
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "calls.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-cfg.h"
#include "tree-ssa.h"
#include "omp-general.h"
#include "omp-low.h"
#include "gimple-low.h"
#include "gomp-constants.h"
#include "splay-tree.h"
#include "gimple-walk.h"
#include "langhooks-def.h"	/* FIXME: for lhd_set_decl_assembler_name */
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "dbgcnt.h"

/* Hash set of poisoned variables in a bind expr.  */
static hash_set<tree> *asan_poisoned_variables = NULL;

enum gimplify_omp_var_data
{
  GOVD_SEEN = 1,
  GOVD_EXPLICIT = 2,
  GOVD_SHARED = 4,
  GOVD_PRIVATE = 8,
  GOVD_FIRSTPRIVATE = 16,
  GOVD_LASTPRIVATE = 32,
  GOVD_REDUCTION = 64,
  GOVD_LOCAL = 128,
  GOVD_MAP = 256,
  GOVD_DEBUG_PRIVATE = 512,
  GOVD_PRIVATE_OUTER_REF = 1024,
  GOVD_LINEAR = 2048,
  GOVD_ALIGNED = 4096,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 8192,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 16384,

  GOVD_MAP_0LEN_ARRAY = 32768,

  /* Flag for GOVD_MAP: if it is an always,to or always,tofrom mapping.  */
  GOVD_MAP_ALWAYS_TO = 65536,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 131072,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 262144,

  /* Flag for GOVD_MAP: must be present already.  */
  GOVD_MAP_FORCE_PRESENT = 524288,

  /* Flag for GOVD_MAP: only allocate.  */
  GOVD_MAP_ALLOC_ONLY = 1048576,

  /* Flag for GOVD_MAP: only copy back.  */
  GOVD_MAP_FROM_ONLY = 2097152,

  GOVD_NONTEMPORAL = 4194304,

  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};


enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_TASKGROUP = 0x01,
  ORT_SIMD = 0x04,

  ORT_PARALLEL = 0x08,
  ORT_COMBINED_PARALLEL = ORT_PARALLEL | 1,

  ORT_TASK = 0x10,
  ORT_UNTIED_TASK = ORT_TASK | 1,
  ORT_TASKLOOP = ORT_TASK | 2,
  ORT_UNTIED_TASKLOOP = ORT_UNTIED_TASK | 2,

  ORT_TEAMS = 0x20,
  ORT_COMBINED_TEAMS = ORT_TEAMS | 1,
  ORT_HOST_TEAMS = ORT_TEAMS | 2,
  ORT_COMBINED_HOST_TEAMS = ORT_COMBINED_TEAMS | 2,

  /* Data region.  */
  ORT_TARGET_DATA = 0x40,

  /* Data region with offloading.  */
  ORT_TARGET = 0x80,
  ORT_COMBINED_TARGET = ORT_TARGET | 1,

  /* OpenACC variants.  */
  ORT_ACC = 0x100,  /* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,  /* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,  /* Parallel construct.  */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 2,  /* Kernels construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 2,  /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x200
};

/* Gimplify hashtable helper.  */

struct gimplify_hasher : free_ptr_hash <elt_t>
{
  static inline hashval_t hash (const elt_t *);
  static inline bool equal (const elt_t *, const elt_t *);
};

struct gimplify_ctx
{
  struct gimplify_ctx *prev_context;

  vec<gbind *> bind_expr_stack;
  tree temps;
  gimple_seq conditional_cleanups;
  tree exit_label;
  tree return_temp;

  vec<tree> case_labels;
  hash_set<tree> *live_switch_vars;
  /* The formal temporary table.  Should this be persistent?  */
  hash_table<gimplify_hasher> *temp_htab;

  int conditions;
  unsigned into_ssa : 1;
  unsigned allow_rhs_cond_expr : 1;
  unsigned in_cleanup_point_expr : 1;
  unsigned keep_stack : 1;
  unsigned save_stack : 1;
  unsigned in_switch_expr : 1;
};

enum gimplify_defaultmap_kind
{
  GDMK_SCALAR,
  GDMK_AGGREGATE,
  GDMK_ALLOCATABLE,
  GDMK_POINTER
};

struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;
  splay_tree variables;
  hash_set<tree> *privatized_types;
  /* Iteration variables in an OMP_FOR.  */
  vec<tree> loop_iter_var;
  location_t location;
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
  bool combined_loop;
  bool distribute;
  bool target_firstprivatize_array_bases;
  bool add_safelen1;
  int defaultmap[4];
};

static struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;

/* Forward declarations.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
static hash_map<tree, tree> *oacc_declare_returns;
static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
					   bool (*) (tree), fallback_t, bool);

/* Shorter alias for gimple_seq_add_stmt_without_update, for use in
   gimplify.c only.  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}
/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  This function is
   similar to gimple_seq_add_seq, but does not scan the operands.
   During gimplification, we need to manipulate statement sequences
   before the def/use vectors have been constructed.  */

static void
gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  si = gsi_last (*dst_p);
  gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
}


/* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
   and popping gimplify contexts.  */

static struct gimplify_ctx *ctx_pool = NULL;

/* Return a gimplify context struct from the pool.  */

static inline struct gimplify_ctx *
ctx_alloc (void)
{
  struct gimplify_ctx * c = ctx_pool;

  if (c)
    ctx_pool = c->prev_context;
  else
    c = XNEW (struct gimplify_ctx);

  memset (c, '\0', sizeof (*c));
  return c;
}

/* Put gimplify context C back into the pool.  */

static inline void
ctx_free (struct gimplify_ctx *c)
{
  c->prev_context = ctx_pool;
  ctx_pool = c;
}

/* Free allocated ctx stack memory.  */

void
free_gimplify_stack (void)
{
  struct gimplify_ctx *c;

  while ((c = ctx_pool))
    {
      ctx_pool = c->prev_context;
      free (c);
    }
}


/* Set up a context for the gimplifier.  */

void
push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
{
  struct gimplify_ctx *c = ctx_alloc ();

  c->prev_context = gimplify_ctxp;
  gimplify_ctxp = c;
  gimplify_ctxp->into_ssa = in_ssa;
  gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
}

/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple *body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  gcc_assert (c
	      && (!c->bind_expr_stack.exists ()
		  || c->bind_expr_stack.is_empty ()));
  c->bind_expr_stack.release ();
  gimplify_ctxp = c->prev_context;

  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  delete c->temp_htab;
  c->temp_htab = NULL;
  ctx_free (c);
}

/* Push a GIMPLE_BIND tuple onto the stack of bindings.  */

static void
gimple_push_bind_expr (gbind *bind_stmt)
{
  gimplify_ctxp->bind_expr_stack.reserve (8);
  gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
}

/* Pop the first element off the stack of bindings.  */

static void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}

/* Return the first element of the stack of bindings.  */

gbind *
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}

/* Return the stack of bindings created during gimplification.  */

vec<gbind *>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}

/* Return true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}
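/* A minimal usage sketch (illustrative only; the tree T and sequence
   SEQ here are hypothetical): callers elsewhere in the compiler
   typically bracket gimplification of a detached tree like so:

     push_gimplify_context (false, false);
     gimple_seq seq = NULL;
     gimplify_and_add (t, &seq);
     pop_gimplify_context (NULL);

   Temporaries created while the context is active are collected in it
   and, with a null BODY, handed to record_vars on the pop.  */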
/* Note that we've entered a COND_EXPR.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}

/* Note that we've left a COND_EXPR.  If we're back at unconditional scope
   now, add any conditional cleanups we've seen to the prequeue.  */

static void
gimple_pop_condition (gimple_seq *pre_p)
{
  int conds = --(gimplify_ctxp->conditions);

  gcc_assert (conds >= 0);
  if (conds == 0)
    {
      gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
      gimplify_ctxp->conditional_cleanups = NULL;
    }
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* Create a new omp construct that deals with variable remapping.  */

static struct gimplify_omp_ctx *
new_omp_context (enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *c;

  c = XCNEW (struct gimplify_omp_ctx);
  c->outer_context = gimplify_omp_ctxp;
  c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
  c->privatized_types = new hash_set<tree>;
  c->location = input_location;
  c->region_type = region_type;
  if ((region_type & ORT_TASK) == 0)
    c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  else
    c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
  c->defaultmap[GDMK_SCALAR] = GOVD_MAP;
  c->defaultmap[GDMK_AGGREGATE] = GOVD_MAP;
  c->defaultmap[GDMK_ALLOCATABLE] = GOVD_MAP;
  c->defaultmap[GDMK_POINTER] = GOVD_MAP;

  return c;
}

/* Destroy an omp construct that deals with variable remapping.  */

static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  delete c->privatized_types;
  c->loop_iter_var.release ();
  XDELETE (c);
}

static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);

/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}

/* Gimplify statement T into sequence *SEQ_P, and return the first
   tuple in the sequence of generated tuples for this statement.
   Return NULL if gimplifying T produced no tuples.  */

static gimple *
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  gimple_stmt_iterator last = gsi_last (*seq_p);

  gimplify_and_add (t, seq_p);

  if (!gsi_end_p (last))
    {
      gsi_next (&last);
      return gsi_stmt (last);
    }
  else
    return gimple_seq_first_stmt (*seq_p);
}
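/* Illustrative pattern (the INIT_EXPR here is hypothetical, but it is
   the same shape internal_get_tmp_var uses further down): to lower an
   assignment built on the fly, a caller writes

     tree mod = build2 (INIT_EXPR, TREE_TYPE (t), t, val);
     gimplify_and_add (mod, pre_p);

   and the gimplified tuples for MOD end up appended to *PRE_P.  */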
/* Returns true iff T is a valid RHS for an assignment to an un-renamed
   LHS, or for a call argument.  */

static bool
is_gimple_mem_rhs (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return is_gimple_val (t) || is_gimple_lvalue (t);
}

/* Return true if T is a CALL_EXPR or an expression that can be
   assigned to a temporary.  Note that this predicate should only be
   used during gimplification.  See the rationale for this in
   gimplify_modify_expr.  */

static bool
is_gimple_reg_rhs_or_call (tree t)
{
  return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
	  || TREE_CODE (t) == CALL_EXPR);
}

/* Return true if T is a valid memory RHS or a CALL_EXPR.  Note that
   this predicate should only be used during gimplification.  See the
   rationale for this in gimplify_modify_expr.  */

static bool
is_gimple_mem_rhs_or_call (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return (is_gimple_val (t)
	    || is_gimple_lvalue (t)
	    || TREE_CLOBBER_P (t)
	    || TREE_CODE (t) == CALL_EXPR);
}

/* Create a temporary with a name derived from VAL.  Subroutine of
   lookup_tmp_var; nobody else should call this function.  */

static inline tree
create_tmp_from_val (tree val)
{
  /* Drop all qualifiers and address-space information from the value type.  */
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
  tree var = create_tmp_var (type, get_name (val));
  if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
      || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (var) = 1;
  return var;
}

/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary.  */

static tree
lookup_tmp_var (tree val, bool is_formal)
{
  tree ret;

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    ret = create_tmp_from_val (val);
  else
    {
      elt_t elt, *elt_p;
      elt_t **slot;

      elt.val = val;
      if (!gimplify_ctxp->temp_htab)
	gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
      slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
      if (*slot == NULL)
	{
	  elt_p = XNEW (elt_t);
	  elt_p->val = val;
	  elt_p->temp = ret = create_tmp_from_val (val);
	  *slot = elt_p;
	}
      else
	{
	  elt_p = *slot;
	  ret = elt_p->temp;
	}
    }

  return ret;
}
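/* For example (temporary name invented): when optimizing, and before
   SSA form is used, gimplifying two occurrences of the same formal
   expression

     use1 (a + b);  use2 (a + b);

   yields roughly

     D.1234 = a + b;  use1 (D.1234);  use2 (D.1234);

   because the second lookup_tmp_var call finds the hash-table slot for
   "a + b" already populated and reuses the temporary.  */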
/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
		      bool is_formal, bool allow_ssa)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  if (allow_ssa
      && gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    {
      t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
      if (! gimple_in_ssa_p (cfun))
	{
	  const char *name = get_name (val);
	  if (name)
	    SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
	}
    }
  else
    t = lookup_tmp_var (val, is_formal);

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  ggc_free (mod);

  return t;
}

/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
      change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  return internal_get_tmp_var (val, pre_p, NULL, true, true);
}

/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
			 bool allow_ssa)
{
  return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa);
}

/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  */

void
declare_vars (tree vars, gimple *gs, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gbind *scope = as_a <gbind *> (gs);

      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
				    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}

/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
   for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
   no such upper bound can be obtained.  */

static void
force_constant_size (tree var)
{
  /* The only attempt we make is by querying the maximum size of objects
     of the variable's type.  */

  HOST_WIDE_INT max_size;

  gcc_assert (VAR_P (var));

  max_size = max_int_size_in_bytes (TREE_TYPE (var));

  gcc_assert (max_size >= 0);

  DECL_SIZE_UNIT (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
  DECL_SIZE (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
}

/* Push the temporary variable TMP into the current binding.  */
*/ 721 722 void 723 gimple_add_tmp_var_fn (struct function *fn, tree tmp) 724 { 725 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp)); 726 727 /* Later processing assumes that the object size is constant, which might 728 not be true at this point. Force the use of a constant upper bound in 729 this case. */ 730 if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp))) 731 force_constant_size (tmp); 732 733 DECL_CONTEXT (tmp) = fn->decl; 734 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1; 735 736 record_vars_into (tmp, fn->decl); 737 } 738 739 /* Push the temporary variable TMP into the current binding. */ 740 741 void 742 gimple_add_tmp_var (tree tmp) 743 { 744 gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp)); 745 746 /* Later processing assumes that the object size is constant, which might 747 not be true at this point. Force the use of a constant upper bound in 748 this case. */ 749 if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp))) 750 force_constant_size (tmp); 751 752 DECL_CONTEXT (tmp) = current_function_decl; 753 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1; 754 755 if (gimplify_ctxp) 756 { 757 DECL_CHAIN (tmp) = gimplify_ctxp->temps; 758 gimplify_ctxp->temps = tmp; 759 760 /* Mark temporaries local within the nearest enclosing parallel. */ 761 if (gimplify_omp_ctxp) 762 { 763 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; 764 while (ctx 765 && (ctx->region_type == ORT_WORKSHARE 766 || ctx->region_type == ORT_TASKGROUP 767 || ctx->region_type == ORT_SIMD 768 || ctx->region_type == ORT_ACC)) 769 ctx = ctx->outer_context; 770 if (ctx) 771 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN); 772 } 773 } 774 else if (cfun) 775 record_vars (tmp); 776 else 777 { 778 gimple_seq body_seq; 779 780 /* This case is for nested functions. We need to expose the locals 781 they create. */ 782 body_seq = gimple_body (current_function_decl); 783 declare_vars (tmp, gimple_seq_first_stmt (body_seq), false); 784 } 785 } 786 787 788 789 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree 790 nodes that are referenced more than once in GENERIC functions. This is 791 necessary because gimplification (translation into GIMPLE) is performed 792 by modifying tree nodes in-place, so gimplication of a shared node in a 793 first context could generate an invalid GIMPLE form in a second context. 794 795 This is achieved with a simple mark/copy/unmark algorithm that walks the 796 GENERIC representation top-down, marks nodes with TREE_VISITED the first 797 time it encounters them, duplicates them if they already have TREE_VISITED 798 set, and finally removes the TREE_VISITED marks it has set. 799 800 The algorithm works only at the function level, i.e. it generates a GENERIC 801 representation of a function with no nodes shared within the function when 802 passed a GENERIC function (except for nodes that are allowed to be shared). 803 804 At the global level, it is also necessary to unshare tree nodes that are 805 referenced in more than one function, for the same aforementioned reason. 806 This requires some cooperation from the front-end. There are 2 strategies: 807 808 1. Manual unsharing. The front-end needs to call unshare_expr on every 809 expression that might end up being shared across functions. 810 811 2. Deep unsharing. This is an extension of regular unsharing. Instead 812 of calling unshare_expr on expressions that might be shared across 813 functions, the front-end pre-marks them with TREE_VISITED. 
      This will ensure that they are unshared on the first reference within
      functions when the regular unsharing algorithm runs.  The counterpart
      is that this algorithm must look deeper than for manual unsharing,
      which is specified by LANG_HOOKS_DEEP_UNSHARING.

   If there are only a few specific cases of node sharing across functions,
   it is probably easier for a front-end to unshare the expressions manually.
   On the contrary, if the expressions generated at the global level are as
   widespread as expressions generated within functions, deep unsharing is
   very likely the way to go.  */

/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      if (data && !((hash_set<tree> *)data)->add (t))
	;
      else
	*walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
	   || TREE_CODE_CLASS (code) == tcc_declaration
	   || TREE_CODE_CLASS (code) == tcc_constant)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}

/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}

/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
   copy_if_shared_r callback unmodified.  */

static inline void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}
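/* Illustrative example (node names invented): suppose the front-end
   reused one ADDR_EXPR node <A> in two statements of the same body,

     lhs1 = <A>;
     lhs2 = <A>;

   The top-down walk reaches <A> from the first statement and sets
   TREE_VISITED on it; reaching it again from the second statement,
   copy_if_shared_r finds the mark and replaces that occurrence with a
   fresh copy via mostly_copy_tree_r, so each statement can then be
   gimplified in place independently.  */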
*/ 914 915 static void 916 unshare_body (tree fndecl) 917 { 918 struct cgraph_node *cgn = cgraph_node::get (fndecl); 919 /* If the language requires deep unsharing, we need a pointer set to make 920 sure we don't repeatedly unshare subtrees of unshareable nodes. */ 921 hash_set<tree> *visited 922 = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL; 923 924 copy_if_shared (&DECL_SAVED_TREE (fndecl), visited); 925 copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited); 926 copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited); 927 928 delete visited; 929 930 if (cgn) 931 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested) 932 unshare_body (cgn->decl); 933 } 934 935 /* Callback for walk_tree to unmark the visited trees rooted at *TP. 936 Subtrees are walked until the first unvisited node is encountered. */ 937 938 static tree 939 unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED) 940 { 941 tree t = *tp; 942 943 /* If this node has been visited, unmark it and keep looking. */ 944 if (TREE_VISITED (t)) 945 TREE_VISITED (t) = 0; 946 947 /* Otherwise, don't look any deeper. */ 948 else 949 *walk_subtrees = 0; 950 951 return NULL_TREE; 952 } 953 954 /* Unmark the visited trees rooted at *TP. */ 955 956 static inline void 957 unmark_visited (tree *tp) 958 { 959 walk_tree (tp, unmark_visited_r, NULL, NULL); 960 } 961 962 /* Likewise, but mark all trees as not visited. */ 963 964 static void 965 unvisit_body (tree fndecl) 966 { 967 struct cgraph_node *cgn = cgraph_node::get (fndecl); 968 969 unmark_visited (&DECL_SAVED_TREE (fndecl)); 970 unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl))); 971 unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl))); 972 973 if (cgn) 974 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested) 975 unvisit_body (cgn->decl); 976 } 977 978 /* Unconditionally make an unshared copy of EXPR. This is used when using 979 stored expressions which span multiple functions, such as BINFO_VTABLE, 980 as the normal unsharing process can't tell that they're shared. */ 981 982 tree 983 unshare_expr (tree expr) 984 { 985 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL); 986 return expr; 987 } 988 989 /* Worker for unshare_expr_without_location. */ 990 991 static tree 992 prune_expr_location (tree *tp, int *walk_subtrees, void *) 993 { 994 if (EXPR_P (*tp)) 995 SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION); 996 else 997 *walk_subtrees = 0; 998 return NULL_TREE; 999 } 1000 1001 /* Similar to unshare_expr but also prune all expression locations 1002 from EXPR. */ 1003 1004 tree 1005 unshare_expr_without_location (tree expr) 1006 { 1007 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL); 1008 if (EXPR_P (expr)) 1009 walk_tree (&expr, prune_expr_location, NULL, NULL); 1010 return expr; 1011 } 1012 1013 /* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has 1014 one, OR_ELSE otherwise. The location of a STATEMENT_LISTs 1015 comprising at least one DEBUG_BEGIN_STMT followed by exactly one 1016 EXPR is the location of the EXPR. 
*/ 1017 1018 static location_t 1019 rexpr_location (tree expr, location_t or_else = UNKNOWN_LOCATION) 1020 { 1021 if (!expr) 1022 return or_else; 1023 1024 if (EXPR_HAS_LOCATION (expr)) 1025 return EXPR_LOCATION (expr); 1026 1027 if (TREE_CODE (expr) != STATEMENT_LIST) 1028 return or_else; 1029 1030 tree_stmt_iterator i = tsi_start (expr); 1031 1032 bool found = false; 1033 while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT) 1034 { 1035 found = true; 1036 tsi_next (&i); 1037 } 1038 1039 if (!found || !tsi_one_before_end_p (i)) 1040 return or_else; 1041 1042 return rexpr_location (tsi_stmt (i), or_else); 1043 } 1044 1045 /* Return TRUE iff EXPR (maybe recursively) has a location; see 1046 rexpr_location for the potential recursion. */ 1047 1048 static inline bool 1049 rexpr_has_location (tree expr) 1050 { 1051 return rexpr_location (expr) != UNKNOWN_LOCATION; 1052 } 1053 1054 1055 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both 1056 contain statements and have a value. Assign its value to a temporary 1057 and give it void_type_node. Return the temporary, or NULL_TREE if 1058 WRAPPER was already void. */ 1059 1060 tree 1061 voidify_wrapper_expr (tree wrapper, tree temp) 1062 { 1063 tree type = TREE_TYPE (wrapper); 1064 if (type && !VOID_TYPE_P (type)) 1065 { 1066 tree *p; 1067 1068 /* Set p to point to the body of the wrapper. Loop until we find 1069 something that isn't a wrapper. */ 1070 for (p = &wrapper; p && *p; ) 1071 { 1072 switch (TREE_CODE (*p)) 1073 { 1074 case BIND_EXPR: 1075 TREE_SIDE_EFFECTS (*p) = 1; 1076 TREE_TYPE (*p) = void_type_node; 1077 /* For a BIND_EXPR, the body is operand 1. */ 1078 p = &BIND_EXPR_BODY (*p); 1079 break; 1080 1081 case CLEANUP_POINT_EXPR: 1082 case TRY_FINALLY_EXPR: 1083 case TRY_CATCH_EXPR: 1084 TREE_SIDE_EFFECTS (*p) = 1; 1085 TREE_TYPE (*p) = void_type_node; 1086 p = &TREE_OPERAND (*p, 0); 1087 break; 1088 1089 case STATEMENT_LIST: 1090 { 1091 tree_stmt_iterator i = tsi_last (*p); 1092 TREE_SIDE_EFFECTS (*p) = 1; 1093 TREE_TYPE (*p) = void_type_node; 1094 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i); 1095 } 1096 break; 1097 1098 case COMPOUND_EXPR: 1099 /* Advance to the last statement. Set all container types to 1100 void. */ 1101 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1)) 1102 { 1103 TREE_SIDE_EFFECTS (*p) = 1; 1104 TREE_TYPE (*p) = void_type_node; 1105 } 1106 break; 1107 1108 case TRANSACTION_EXPR: 1109 TREE_SIDE_EFFECTS (*p) = 1; 1110 TREE_TYPE (*p) = void_type_node; 1111 p = &TRANSACTION_EXPR_BODY (*p); 1112 break; 1113 1114 default: 1115 /* Assume that any tree upon which voidify_wrapper_expr is 1116 directly called is a wrapper, and that its body is op0. */ 1117 if (p == &wrapper) 1118 { 1119 TREE_SIDE_EFFECTS (*p) = 1; 1120 TREE_TYPE (*p) = void_type_node; 1121 p = &TREE_OPERAND (*p, 0); 1122 break; 1123 } 1124 goto out; 1125 } 1126 } 1127 1128 out: 1129 if (p == NULL || IS_EMPTY_STMT (*p)) 1130 temp = NULL_TREE; 1131 else if (temp) 1132 { 1133 /* The wrapper is on the RHS of an assignment that we're pushing 1134 down. */ 1135 gcc_assert (TREE_CODE (temp) == INIT_EXPR 1136 || TREE_CODE (temp) == MODIFY_EXPR); 1137 TREE_OPERAND (temp, 1) = *p; 1138 *p = temp; 1139 } 1140 else 1141 { 1142 temp = create_tmp_var (type, "retval"); 1143 *p = build2 (INIT_EXPR, type, temp, *p); 1144 } 1145 1146 return temp; 1147 } 1148 1149 return NULL_TREE; 1150 } 1151 1152 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as 1153 a temporary through which they communicate. 
*/ 1154 1155 static void 1156 build_stack_save_restore (gcall **save, gcall **restore) 1157 { 1158 tree tmp_var; 1159 1160 *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0); 1161 tmp_var = create_tmp_var (ptr_type_node, "saved_stack"); 1162 gimple_call_set_lhs (*save, tmp_var); 1163 1164 *restore 1165 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE), 1166 1, tmp_var); 1167 } 1168 1169 /* Generate IFN_ASAN_MARK call that poisons shadow of a for DECL variable. */ 1170 1171 static tree 1172 build_asan_poison_call_expr (tree decl) 1173 { 1174 /* Do not poison variables that have size equal to zero. */ 1175 tree unit_size = DECL_SIZE_UNIT (decl); 1176 if (zerop (unit_size)) 1177 return NULL_TREE; 1178 1179 tree base = build_fold_addr_expr (decl); 1180 1181 return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK, 1182 void_type_node, 3, 1183 build_int_cst (integer_type_node, 1184 ASAN_MARK_POISON), 1185 base, unit_size); 1186 } 1187 1188 /* Generate IFN_ASAN_MARK call that would poison or unpoison, depending 1189 on POISON flag, shadow memory of a DECL variable. The call will be 1190 put on location identified by IT iterator, where BEFORE flag drives 1191 position where the stmt will be put. */ 1192 1193 static void 1194 asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it, 1195 bool before) 1196 { 1197 tree unit_size = DECL_SIZE_UNIT (decl); 1198 tree base = build_fold_addr_expr (decl); 1199 1200 /* Do not poison variables that have size equal to zero. */ 1201 if (zerop (unit_size)) 1202 return; 1203 1204 /* It's necessary to have all stack variables aligned to ASAN granularity 1205 bytes. */ 1206 if (DECL_ALIGN_UNIT (decl) <= ASAN_SHADOW_GRANULARITY) 1207 SET_DECL_ALIGN (decl, BITS_PER_UNIT * ASAN_SHADOW_GRANULARITY); 1208 1209 HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON; 1210 1211 gimple *g 1212 = gimple_build_call_internal (IFN_ASAN_MARK, 3, 1213 build_int_cst (integer_type_node, flags), 1214 base, unit_size); 1215 1216 if (before) 1217 gsi_insert_before (it, g, GSI_NEW_STMT); 1218 else 1219 gsi_insert_after (it, g, GSI_NEW_STMT); 1220 } 1221 1222 /* Generate IFN_ASAN_MARK internal call that depending on POISON flag 1223 either poisons or unpoisons a DECL. Created statement is appended 1224 to SEQ_P gimple sequence. */ 1225 1226 static void 1227 asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p) 1228 { 1229 gimple_stmt_iterator it = gsi_last (*seq_p); 1230 bool before = false; 1231 1232 if (gsi_end_p (it)) 1233 before = true; 1234 1235 asan_poison_variable (decl, poison, &it, before); 1236 } 1237 1238 /* Sort pair of VAR_DECLs A and B by DECL_UID. */ 1239 1240 static int 1241 sort_by_decl_uid (const void *a, const void *b) 1242 { 1243 const tree *t1 = (const tree *)a; 1244 const tree *t2 = (const tree *)b; 1245 1246 int uid1 = DECL_UID (*t1); 1247 int uid2 = DECL_UID (*t2); 1248 1249 if (uid1 < uid2) 1250 return -1; 1251 else if (uid1 > uid2) 1252 return 1; 1253 else 1254 return 0; 1255 } 1256 1257 /* Generate IFN_ASAN_MARK internal call for all VARIABLES 1258 depending on POISON flag. Created statement is appended 1259 to SEQ_P gimple sequence. 
*/ 1260 1261 static void 1262 asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p) 1263 { 1264 unsigned c = variables->elements (); 1265 if (c == 0) 1266 return; 1267 1268 auto_vec<tree> sorted_variables (c); 1269 1270 for (hash_set<tree>::iterator it = variables->begin (); 1271 it != variables->end (); ++it) 1272 sorted_variables.safe_push (*it); 1273 1274 sorted_variables.qsort (sort_by_decl_uid); 1275 1276 unsigned i; 1277 tree var; 1278 FOR_EACH_VEC_ELT (sorted_variables, i, var) 1279 { 1280 asan_poison_variable (var, poison, seq_p); 1281 1282 /* Add use_after_scope_memory attribute for the variable in order 1283 to prevent re-written into SSA. */ 1284 if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE, 1285 DECL_ATTRIBUTES (var))) 1286 DECL_ATTRIBUTES (var) 1287 = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE), 1288 integer_one_node, 1289 DECL_ATTRIBUTES (var)); 1290 } 1291 } 1292 1293 /* Gimplify a BIND_EXPR. Just voidify and recurse. */ 1294 1295 static enum gimplify_status 1296 gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p) 1297 { 1298 tree bind_expr = *expr_p; 1299 bool old_keep_stack = gimplify_ctxp->keep_stack; 1300 bool old_save_stack = gimplify_ctxp->save_stack; 1301 tree t; 1302 gbind *bind_stmt; 1303 gimple_seq body, cleanup; 1304 gcall *stack_save; 1305 location_t start_locus = 0, end_locus = 0; 1306 tree ret_clauses = NULL; 1307 1308 tree temp = voidify_wrapper_expr (bind_expr, NULL); 1309 1310 /* Mark variables seen in this bind expr. */ 1311 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t)) 1312 { 1313 if (VAR_P (t)) 1314 { 1315 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; 1316 1317 /* Mark variable as local. */ 1318 if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t) 1319 && (! DECL_SEEN_IN_BIND_EXPR_P (t) 1320 || splay_tree_lookup (ctx->variables, 1321 (splay_tree_key) t) == NULL)) 1322 { 1323 int flag = GOVD_LOCAL; 1324 if (ctx->region_type == ORT_SIMD 1325 && TREE_ADDRESSABLE (t) 1326 && !TREE_STATIC (t)) 1327 { 1328 if (TREE_CODE (DECL_SIZE_UNIT (t)) != INTEGER_CST) 1329 ctx->add_safelen1 = true; 1330 else 1331 flag = GOVD_PRIVATE; 1332 } 1333 omp_add_variable (ctx, t, flag | GOVD_SEEN); 1334 } 1335 1336 DECL_SEEN_IN_BIND_EXPR_P (t) = 1; 1337 1338 if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun) 1339 cfun->has_local_explicit_reg_vars = true; 1340 } 1341 1342 /* Preliminarily mark non-addressed complex variables as eligible 1343 for promotion to gimple registers. We'll transform their uses 1344 as we find them. */ 1345 if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE 1346 || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE) 1347 && !TREE_THIS_VOLATILE (t) 1348 && (VAR_P (t) && !DECL_HARD_REGISTER (t)) 1349 && !needs_to_live_in_memory (t)) 1350 DECL_GIMPLE_REG_P (t) = 1; 1351 } 1352 1353 bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL, 1354 BIND_EXPR_BLOCK (bind_expr)); 1355 gimple_push_bind_expr (bind_stmt); 1356 1357 gimplify_ctxp->keep_stack = false; 1358 gimplify_ctxp->save_stack = false; 1359 1360 /* Gimplify the body into the GIMPLE_BIND tuple's body. */ 1361 body = NULL; 1362 gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body); 1363 gimple_bind_set_body (bind_stmt, body); 1364 1365 /* Source location wise, the cleanup code (stack_restore and clobbers) 1366 belongs to the end of the block, so propagate what we have. 
The 1367 stack_save operation belongs to the beginning of block, which we can 1368 infer from the bind_expr directly if the block has no explicit 1369 assignment. */ 1370 if (BIND_EXPR_BLOCK (bind_expr)) 1371 { 1372 end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr)); 1373 start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr)); 1374 } 1375 if (start_locus == 0) 1376 start_locus = EXPR_LOCATION (bind_expr); 1377 1378 cleanup = NULL; 1379 stack_save = NULL; 1380 1381 /* If the code both contains VLAs and calls alloca, then we cannot reclaim 1382 the stack space allocated to the VLAs. */ 1383 if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack) 1384 { 1385 gcall *stack_restore; 1386 1387 /* Save stack on entry and restore it on exit. Add a try_finally 1388 block to achieve this. */ 1389 build_stack_save_restore (&stack_save, &stack_restore); 1390 1391 gimple_set_location (stack_save, start_locus); 1392 gimple_set_location (stack_restore, end_locus); 1393 1394 gimplify_seq_add_stmt (&cleanup, stack_restore); 1395 } 1396 1397 /* Add clobbers for all variables that go out of scope. */ 1398 for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t)) 1399 { 1400 if (VAR_P (t) 1401 && !is_global_var (t) 1402 && DECL_CONTEXT (t) == current_function_decl) 1403 { 1404 if (!DECL_HARD_REGISTER (t) 1405 && !TREE_THIS_VOLATILE (t) 1406 && !DECL_HAS_VALUE_EXPR_P (t) 1407 /* Only care for variables that have to be in memory. Others 1408 will be rewritten into SSA names, hence moved to the 1409 top-level. */ 1410 && !is_gimple_reg (t) 1411 && flag_stack_reuse != SR_NONE) 1412 { 1413 tree clobber = build_clobber (TREE_TYPE (t)); 1414 gimple *clobber_stmt; 1415 clobber_stmt = gimple_build_assign (t, clobber); 1416 gimple_set_location (clobber_stmt, end_locus); 1417 gimplify_seq_add_stmt (&cleanup, clobber_stmt); 1418 } 1419 1420 if (flag_openacc && oacc_declare_returns != NULL) 1421 { 1422 tree *c = oacc_declare_returns->get (t); 1423 if (c != NULL) 1424 { 1425 if (ret_clauses) 1426 OMP_CLAUSE_CHAIN (*c) = ret_clauses; 1427 1428 ret_clauses = *c; 1429 1430 oacc_declare_returns->remove (t); 1431 1432 if (oacc_declare_returns->elements () == 0) 1433 { 1434 delete oacc_declare_returns; 1435 oacc_declare_returns = NULL; 1436 } 1437 } 1438 } 1439 } 1440 1441 if (asan_poisoned_variables != NULL 1442 && asan_poisoned_variables->contains (t)) 1443 { 1444 asan_poisoned_variables->remove (t); 1445 asan_poison_variable (t, true, &cleanup); 1446 } 1447 1448 if (gimplify_ctxp->live_switch_vars != NULL 1449 && gimplify_ctxp->live_switch_vars->contains (t)) 1450 gimplify_ctxp->live_switch_vars->remove (t); 1451 } 1452 1453 if (ret_clauses) 1454 { 1455 gomp_target *stmt; 1456 gimple_stmt_iterator si = gsi_start (cleanup); 1457 1458 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE, 1459 ret_clauses); 1460 gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT); 1461 } 1462 1463 if (cleanup) 1464 { 1465 gtry *gs; 1466 gimple_seq new_body; 1467 1468 new_body = NULL; 1469 gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup, 1470 GIMPLE_TRY_FINALLY); 1471 1472 if (stack_save) 1473 gimplify_seq_add_stmt (&new_body, stack_save); 1474 gimplify_seq_add_stmt (&new_body, gs); 1475 gimple_bind_set_body (bind_stmt, new_body); 1476 } 1477 1478 /* keep_stack propagates all the way up to the outermost BIND_EXPR. 
  /* keep_stack propagates all the way up to the outermost BIND_EXPR.  */
  if (!gimplify_ctxp->keep_stack)
    gimplify_ctxp->keep_stack = old_keep_stack;
  gimplify_ctxp->save_stack = old_save_stack;

  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, bind_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}

/* Maybe add early return predict statement to PRE_P sequence.  */

static void
maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
{
  /* If we are not in a conditional context, add PREDICT statement.  */
  if (gimple_conditional_context ())
    {
      gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
					      NOT_TAKEN);
      gimplify_seq_add_stmt (pre_p, predict);
    }
}

/* Gimplify a RETURN_EXPR.  If the expression to be returned is not a
   GIMPLE value, it is assigned to a new temporary and the statement is
   re-written to return the temporary.

   PRE_P points to the sequence where side effects that must happen before
   STMT should be stored.  */

static enum gimplify_status
gimplify_return_expr (tree stmt, gimple_seq *pre_p)
{
  greturn *ret;
  tree ret_expr = TREE_OPERAND (stmt, 0);
  tree result_decl, result;

  if (ret_expr == error_mark_node)
    return GS_ERROR;

  if (!ret_expr
      || TREE_CODE (ret_expr) == RESULT_DECL)
    {
      maybe_add_early_return_predict_stmt (pre_p);
      greturn *ret = gimple_build_return (ret_expr);
      gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
      gimplify_seq_add_stmt (pre_p, ret);
      return GS_ALL_DONE;
    }

  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
    result_decl = NULL_TREE;
  else
    {
      result_decl = TREE_OPERAND (ret_expr, 0);

      /* See through a return by reference.  */
      if (TREE_CODE (result_decl) == INDIRECT_REF)
	result_decl = TREE_OPERAND (result_decl, 0);

      gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
		   || TREE_CODE (ret_expr) == INIT_EXPR)
		  && TREE_CODE (result_decl) == RESULT_DECL);
    }

  /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
     Recall that aggregate_value_p is FALSE for any aggregate type that is
     returned in registers.  If we're returning values in registers, then
     we don't want to extend the lifetime of the RESULT_DECL, particularly
     across another call.  In addition, for those aggregates for which
     hard_function_value generates a PARALLEL, we'll die during normal
     expansion of structure assignments; there's special code in expand_return
     to handle this case that does not exist in expand_expr.  */
  if (!result_decl)
    result = NULL_TREE;
  else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
    {
      if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
	    gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
	  /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
	     should be effectively allocated by the caller, i.e. all calls to
	     this function must be subject to the Return Slot Optimization.  */
	  gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
	  gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
	}
      result = result_decl;
    }
  else if (gimplify_ctxp->return_temp)
    result = gimplify_ctxp->return_temp;
  else
    {
      result = create_tmp_reg (TREE_TYPE (result_decl));

      /* ??? With complex control flow (usually involving abnormal edges),
	 we can wind up warning about an uninitialized value for this.  Due
	 to how this variable is constructed and initialized, this is never
	 true.  Give up and never warn.  */
      TREE_NO_WARNING (result) = 1;

      gimplify_ctxp->return_temp = result;
    }

  /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
     Then gimplify the whole thing.  */
  if (result != result_decl)
    TREE_OPERAND (ret_expr, 0) = result;

  gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);

  maybe_add_early_return_predict_stmt (pre_p);
  ret = gimple_build_return (result);
  gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
  gimplify_seq_add_stmt (pre_p, ret);

  return GS_ALL_DONE;
}

/* Gimplify a variable-length array DECL.  */

static void
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
{
  /* This is a variable-sized decl.  Simplify its size and mark it
     for deferred expansion.  */
  tree t, addr, ptr_type;

  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);

  /* Don't mess with a DECL_VALUE_EXPR set by the front-end.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    return;

  /* All occurrences of this decl in final gimplified code will be
     replaced by indirection.  Setting DECL_VALUE_EXPR does two
     things: First, it lets the rest of the gimplifier know what
     replacement to use.  Second, it lets the debug info know
     where to find the value.  */
  ptr_type = build_pointer_type (TREE_TYPE (decl));
  addr = create_tmp_var (ptr_type, get_name (decl));
  DECL_IGNORED_P (addr) = 0;
  t = build_fold_indirect_ref (addr);
  TREE_THIS_NOTRAP (t) = 1;
  SET_DECL_VALUE_EXPR (decl, t);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;

  t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl),
			      max_int_size_in_bytes (TREE_TYPE (decl)));
  /* The call has been built for a variable-sized object.  */
  CALL_ALLOCA_FOR_VAR_P (t) = 1;
  t = fold_convert (ptr_type, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);

  gimplify_and_add (t, seq_p);
}

/* A helper function to be called via walk_tree.  Mark all labels under *TP
   as being forced.  To be called for DECL_INITIAL of static variables.  */

static tree
force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  if (TYPE_P (*tp))
    *walk_subtrees = 0;
  if (TREE_CODE (*tp) == LABEL_DECL)
    {
      FORCED_LABEL (*tp) = 1;
      cfun->has_forced_label_in_static = 1;
    }

  return NULL_TREE;
}
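/* Illustrative effect of gimplify_vla_decl (temporary names invented):
   for "char a[n];" the decl gets DECL_VALUE_EXPR "*a.1" and the
   sequence grows an allocation along the lines of

     D.1900 = (sizetype) n;
     a.1 = __builtin_alloca_with_align (D.1900, ...);

   after which every use of A is rewritten through the pointer
   temporary a.1.  */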
*/ 1665 1666 static enum gimplify_status 1667 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p) 1668 { 1669 tree stmt = *stmt_p; 1670 tree decl = DECL_EXPR_DECL (stmt); 1671 1672 *stmt_p = NULL_TREE; 1673 1674 if (TREE_TYPE (decl) == error_mark_node) 1675 return GS_ERROR; 1676 1677 if ((TREE_CODE (decl) == TYPE_DECL 1678 || VAR_P (decl)) 1679 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl))) 1680 { 1681 gimplify_type_sizes (TREE_TYPE (decl), seq_p); 1682 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE) 1683 gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p); 1684 } 1685 1686 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified 1687 in case its size expressions contain problematic nodes like CALL_EXPR. */ 1688 if (TREE_CODE (decl) == TYPE_DECL 1689 && DECL_ORIGINAL_TYPE (decl) 1690 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl))) 1691 { 1692 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p); 1693 if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE) 1694 gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p); 1695 } 1696 1697 if (VAR_P (decl) && !DECL_EXTERNAL (decl)) 1698 { 1699 tree init = DECL_INITIAL (decl); 1700 bool is_vla = false; 1701 1702 if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST 1703 || (!TREE_STATIC (decl) 1704 && flag_stack_check == GENERIC_STACK_CHECK 1705 && compare_tree_int (DECL_SIZE_UNIT (decl), 1706 STACK_CHECK_MAX_VAR_SIZE) > 0)) 1707 { 1708 gimplify_vla_decl (decl, seq_p); 1709 is_vla = true; 1710 } 1711 1712 if (asan_poisoned_variables 1713 && !is_vla 1714 && TREE_ADDRESSABLE (decl) 1715 && !TREE_STATIC (decl) 1716 && !DECL_HAS_VALUE_EXPR_P (decl) 1717 && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT 1718 && dbg_cnt (asan_use_after_scope) 1719 && !gimplify_omp_ctxp) 1720 { 1721 asan_poisoned_variables->add (decl); 1722 asan_poison_variable (decl, false, seq_p); 1723 if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars) 1724 gimplify_ctxp->live_switch_vars->add (decl); 1725 } 1726 1727 /* Some front ends do not explicitly declare all anonymous 1728 artificial variables. We compensate here by declaring the 1729 variables, though it would be better if the front ends would 1730 explicitly declare them. */ 1731 if (!DECL_SEEN_IN_BIND_EXPR_P (decl) 1732 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE) 1733 gimple_add_tmp_var (decl); 1734 1735 if (init && init != error_mark_node) 1736 { 1737 if (!TREE_STATIC (decl)) 1738 { 1739 DECL_INITIAL (decl) = NULL_TREE; 1740 init = build2 (INIT_EXPR, void_type_node, decl, init); 1741 gimplify_and_add (init, seq_p); 1742 ggc_free (init); 1743 } 1744 else 1745 /* We must still examine initializers for static variables 1746 as they may contain a label address. */ 1747 walk_tree (&init, force_labels_r, NULL, NULL); 1748 } 1749 } 1750 1751 return GS_ALL_DONE; 1752 } 1753 1754 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body 1755 and replacing the LOOP_EXPR with goto, but if the loop contains an 1756 EXIT_EXPR, we need to append a label for it to jump to. 
*/ 1757 1758 static enum gimplify_status 1759 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p) 1760 { 1761 tree saved_label = gimplify_ctxp->exit_label; 1762 tree start_label = create_artificial_label (UNKNOWN_LOCATION); 1763 1764 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label)); 1765 1766 gimplify_ctxp->exit_label = NULL_TREE; 1767 1768 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p); 1769 1770 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label)); 1771 1772 if (gimplify_ctxp->exit_label) 1773 gimplify_seq_add_stmt (pre_p, 1774 gimple_build_label (gimplify_ctxp->exit_label)); 1775 1776 gimplify_ctxp->exit_label = saved_label; 1777 1778 *expr_p = NULL; 1779 return GS_ALL_DONE; 1780 } 1781 1782 /* Gimplify a statement list onto a sequence. These may be created either 1783 by an enlightened front-end, or by shortcut_cond_expr. */ 1784 1785 static enum gimplify_status 1786 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p) 1787 { 1788 tree temp = voidify_wrapper_expr (*expr_p, NULL); 1789 1790 tree_stmt_iterator i = tsi_start (*expr_p); 1791 1792 while (!tsi_end_p (i)) 1793 { 1794 gimplify_stmt (tsi_stmt_ptr (i), pre_p); 1795 tsi_delink (&i); 1796 } 1797 1798 if (temp) 1799 { 1800 *expr_p = temp; 1801 return GS_OK; 1802 } 1803 1804 return GS_ALL_DONE; 1805 } 1806 1807 /* Callback for walk_gimple_seq. */ 1808 1809 static tree 1810 warn_switch_unreachable_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p, 1811 struct walk_stmt_info *wi) 1812 { 1813 gimple *stmt = gsi_stmt (*gsi_p); 1814 1815 *handled_ops_p = true; 1816 switch (gimple_code (stmt)) 1817 { 1818 case GIMPLE_TRY: 1819 /* A compiler-generated cleanup or a user-written try block. 1820 If it's empty, don't dive into it--that would result in 1821 worse location info. */ 1822 if (gimple_try_eval (stmt) == NULL) 1823 { 1824 wi->info = stmt; 1825 return integer_zero_node; 1826 } 1827 /* Fall through. */ 1828 case GIMPLE_BIND: 1829 case GIMPLE_CATCH: 1830 case GIMPLE_EH_FILTER: 1831 case GIMPLE_TRANSACTION: 1832 /* Walk the sub-statements. */ 1833 *handled_ops_p = false; 1834 break; 1835 1836 case GIMPLE_DEBUG: 1837 /* Ignore these. We may generate them before declarations that 1838 are never executed. If there's something to warn about, 1839 there will be non-debug stmts too, and we'll catch those. */ 1840 break; 1841 1842 case GIMPLE_CALL: 1843 if (gimple_call_internal_p (stmt, IFN_ASAN_MARK)) 1844 { 1845 *handled_ops_p = false; 1846 break; 1847 } 1848 /* Fall through. */ 1849 default: 1850 /* Save the first "real" statement (not a decl/lexical scope/...). */ 1851 wi->info = stmt; 1852 return integer_zero_node; 1853 } 1854 return NULL_TREE; 1855 } 1856 1857 /* Possibly warn about unreachable statements between switch's controlling 1858 expression and the first case. SEQ is the body of a switch expression. */ 1859 1860 static void 1861 maybe_warn_switch_unreachable (gimple_seq seq) 1862 { 1863 if (!warn_switch_unreachable 1864 /* This warning doesn't play well with Fortran when optimizations 1865 are on. 
      || lang_GNU_Fortran ()
      || seq == NULL)
    return;

  struct walk_stmt_info wi;
  memset (&wi, 0, sizeof (wi));
  walk_gimple_seq (seq, warn_switch_unreachable_r, NULL, &wi);
  gimple *stmt = (gimple *) wi.info;

  if (stmt && gimple_code (stmt) != GIMPLE_LABEL)
    {
      if (gimple_code (stmt) == GIMPLE_GOTO
	  && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
	  && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
	/* Don't warn for compiler-generated gotos.  These occur
	   in Duff's devices, for example.  */;
      else
	warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
		    "statement will never be executed");
    }
}


/* A label entry that pairs label and a location.  */
struct label_entry
{
  tree label;
  location_t loc;
};

/* Find LABEL in vector of label entries VEC.  */

static struct label_entry *
find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
{
  unsigned int i;
  struct label_entry *l;

  FOR_EACH_VEC_ELT (*vec, i, l)
    if (l->label == label)
      return l;
  return NULL;
}

/* Return true if LABEL, a LABEL_DECL, represents a case label
   in a vector of labels CASES.  */

static bool
case_label_p (const vec<tree> *cases, tree label)
{
  unsigned int i;
  tree l;

  FOR_EACH_VEC_ELT (*cases, i, l)
    if (CASE_LABEL (l) == label)
      return true;
  return false;
}

/* Find the last nondebug statement in a scope STMT.  */

static gimple *
last_stmt_in_scope (gimple *stmt)
{
  if (!stmt)
    return NULL;

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      {
	gbind *bind = as_a <gbind *> (stmt);
	stmt = gimple_seq_last_nondebug_stmt (gimple_bind_body (bind));
	return last_stmt_in_scope (stmt);
      }

    case GIMPLE_TRY:
      {
	gtry *try_stmt = as_a <gtry *> (stmt);
	stmt = gimple_seq_last_nondebug_stmt (gimple_try_eval (try_stmt));
	gimple *last_eval = last_stmt_in_scope (stmt);
	if (gimple_stmt_may_fallthru (last_eval)
	    && (last_eval == NULL
		|| !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
	    && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
	  {
	    stmt = gimple_seq_last_nondebug_stmt (gimple_try_cleanup (try_stmt));
	    return last_stmt_in_scope (stmt);
	  }
	else
	  return last_eval;
      }

    case GIMPLE_DEBUG:
      gcc_unreachable ();

    default:
      return stmt;
    }
}

/* Collect interesting labels in LABELS and return the statement preceding
   another case label, or a user-defined label.  Store a location useful
   to give warnings at *PREVLOC (usually the location of the returned
   statement or of its surrounding scope).  */

static gimple *
collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
			    auto_vec <struct label_entry> *labels,
			    location_t *prevloc)
{
  gimple *prev = NULL;

  *prevloc = UNKNOWN_LOCATION;
  do
    {
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND)
	{
	  /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
	     which starts on a GIMPLE_SWITCH and ends with a break label.
	     Handle that as a single statement that can fall through.  */
*/ 1987 gbind *bind = as_a <gbind *> (gsi_stmt (*gsi_p)); 1988 gimple *first = gimple_seq_first_stmt (gimple_bind_body (bind)); 1989 gimple *last = gimple_seq_last_stmt (gimple_bind_body (bind)); 1990 if (last 1991 && gimple_code (first) == GIMPLE_SWITCH 1992 && gimple_code (last) == GIMPLE_LABEL) 1993 { 1994 tree label = gimple_label_label (as_a <glabel *> (last)); 1995 if (SWITCH_BREAK_LABEL_P (label)) 1996 { 1997 prev = bind; 1998 gsi_next (gsi_p); 1999 continue; 2000 } 2001 } 2002 } 2003 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND 2004 || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY) 2005 { 2006 /* Nested scope. Only look at the last statement of 2007 the innermost scope. */ 2008 location_t bind_loc = gimple_location (gsi_stmt (*gsi_p)); 2009 gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p)); 2010 if (last) 2011 { 2012 prev = last; 2013 /* It might be a label without a location. Use the 2014 location of the scope then. */ 2015 if (!gimple_has_location (prev)) 2016 *prevloc = bind_loc; 2017 } 2018 gsi_next (gsi_p); 2019 continue; 2020 } 2021 2022 /* Ifs are tricky. */ 2023 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND) 2024 { 2025 gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p)); 2026 tree false_lab = gimple_cond_false_label (cond_stmt); 2027 location_t if_loc = gimple_location (cond_stmt); 2028 2029 /* If we have e.g. 2030 if (i > 1) goto <D.2259>; else goto D; 2031 we can't do much with the else-branch. */ 2032 if (!DECL_ARTIFICIAL (false_lab)) 2033 break; 2034 2035 /* Go on until the false label, then one step back. */ 2036 for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p)) 2037 { 2038 gimple *stmt = gsi_stmt (*gsi_p); 2039 if (gimple_code (stmt) == GIMPLE_LABEL 2040 && gimple_label_label (as_a <glabel *> (stmt)) == false_lab) 2041 break; 2042 } 2043 2044 /* Not found? Oops. */ 2045 if (gsi_end_p (*gsi_p)) 2046 break; 2047 2048 struct label_entry l = { false_lab, if_loc }; 2049 labels->safe_push (l); 2050 2051 /* Go to the last statement of the then branch. */ 2052 gsi_prev (gsi_p); 2053 2054 /* if (i != 0) goto <D.1759>; else goto <D.1760>; 2055 <D.1759>: 2056 <stmt>; 2057 goto <D.1761>; 2058 <D.1760>: 2059 */ 2060 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO 2061 && !gimple_has_location (gsi_stmt (*gsi_p))) 2062 { 2063 /* Look at the statement before, it might be 2064 attribute fallthrough, in which case don't warn. */ 2065 gsi_prev (gsi_p); 2066 bool fallthru_before_dest 2067 = gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH); 2068 gsi_next (gsi_p); 2069 tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p)); 2070 if (!fallthru_before_dest) 2071 { 2072 struct label_entry l = { goto_dest, if_loc }; 2073 labels->safe_push (l); 2074 } 2075 } 2076 /* And move back. */ 2077 gsi_next (gsi_p); 2078 } 2079 2080 /* Remember the last statement. Skip labels that are of no interest 2081 to us. */ 2082 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL) 2083 { 2084 tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p))); 2085 if (find_label_entry (labels, label)) 2086 prev = gsi_stmt (*gsi_p); 2087 } 2088 else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK)) 2089 ; 2090 else if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_PREDICT) 2091 ; 2092 else if (!is_gimple_debug (gsi_stmt (*gsi_p))) 2093 prev = gsi_stmt (*gsi_p); 2094 gsi_next (gsi_p); 2095 } 2096 while (!gsi_end_p (*gsi_p) 2097 /* Stop if we find a case or a user-defined label. 
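	    Both kinds carry a source location, which is what the test
	    below keys on: labels created artificially by the gimplifier
	    have no location and do not stop the walk.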
*/
2098 	 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
2099 	     || !gimple_has_location (gsi_stmt (*gsi_p))));
2100 
2101   if (prev && gimple_has_location (prev))
2102     *prevloc = gimple_location (prev);
2103   return prev;
2104 }
2105 
2106 /* Return true if the switch fallthrough warning should occur. LABEL is
2107    the label statement that we're falling through to. */
2108 
2109 static bool
2110 should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
2111 {
2112   gimple_stmt_iterator gsi = *gsi_p;
2113 
2114   /* Don't warn if the label is marked with a "falls through" comment. */
2115   if (FALLTHROUGH_LABEL_P (label))
2116     return false;
2117 
2118   /* Don't warn for non-case labels followed by a statement:
2119        case 0:
2120 	 foo ();
2121        label:
2122 	 bar ();
2123      as these are likely intentional. */
2124   if (!case_label_p (&gimplify_ctxp->case_labels, label))
2125     {
2126       tree l;
2127       while (!gsi_end_p (gsi)
2128 	     && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2129 	     && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
2130 	     && !case_label_p (&gimplify_ctxp->case_labels, l))
2131 	gsi_next_nondebug (&gsi);
2132       if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
2133 	return false;
2134     }
2135 
2136   /* Don't warn for terminated branches, i.e. when the branch following
2137      the subsequent case labels immediately breaks. */
2138   gsi = *gsi_p;
2139 
2140   /* Skip all immediately following labels. */
2141   while (!gsi_end_p (gsi)
2142 	 && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2143 	     || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT))
2144     gsi_next_nondebug (&gsi);
2145 
2146   /* { ... something; default:; } */
2147   if (gsi_end_p (gsi)
2148       /* { ... something; default: break; } or
2149 	 { ... something; default: goto L; } */
2150       || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
2151       /* { ... something; default: return; } */
2152       || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
2153     return false;
2154 
2155   return true;
2156 }
2157 
2158 /* Callback for walk_gimple_seq. */
2159 
2160 static tree
2161 warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2162 			     struct walk_stmt_info *)
2163 {
2164   gimple *stmt = gsi_stmt (*gsi_p);
2165 
2166   *handled_ops_p = true;
2167   switch (gimple_code (stmt))
2168     {
2169     case GIMPLE_TRY:
2170     case GIMPLE_BIND:
2171     case GIMPLE_CATCH:
2172     case GIMPLE_EH_FILTER:
2173     case GIMPLE_TRANSACTION:
2174       /* Walk the sub-statements. */
2175       *handled_ops_p = false;
2176       break;
2177 
2178     /* Find a sequence of form:
2179 
2180        GIMPLE_LABEL
2181        [...]
2182        <may fallthru stmt>
2183        GIMPLE_LABEL
2184 
2185        and possibly warn. */
2186     case GIMPLE_LABEL:
2187       {
2188 	/* Found a label. Skip all immediately following labels. */
2189 	while (!gsi_end_p (*gsi_p)
2190 	       && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2191 	  gsi_next_nondebug (gsi_p);
2192 
2193 	/* There might be no more statements. */
2194 	if (gsi_end_p (*gsi_p))
2195 	  return integer_zero_node;
2196 
2197 	/* Vector of labels that fall through. */
2198 	auto_vec <struct label_entry> labels;
2199 	location_t prevloc;
2200 	gimple *prev = collect_fallthrough_labels (gsi_p, &labels, &prevloc);
2201 
2202 	/* There might be no more statements. */
2203 	if (gsi_end_p (*gsi_p))
2204 	  return integer_zero_node;
2205 
2206 	gimple *next = gsi_stmt (*gsi_p);
2207 	tree label;
2208 	/* If what follows is a label, then we may have a fallthrough.
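	   Illustratively (example user code, not from this file), the
	   situation being diagnosed corresponds to:

	     switch (x)
	       {
	       case 0:
		 foo ();	<-- "this statement may fall through"
	       case 1:		<-- "here"
		 bar ();
		 break;
	       }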
*/ 2209 if (gimple_code (next) == GIMPLE_LABEL 2210 && gimple_has_location (next) 2211 && (label = gimple_label_label (as_a <glabel *> (next))) 2212 && prev != NULL) 2213 { 2214 struct label_entry *l; 2215 bool warned_p = false; 2216 auto_diagnostic_group d; 2217 if (!should_warn_for_implicit_fallthrough (gsi_p, label)) 2218 /* Quiet. */; 2219 else if (gimple_code (prev) == GIMPLE_LABEL 2220 && (label = gimple_label_label (as_a <glabel *> (prev))) 2221 && (l = find_label_entry (&labels, label))) 2222 warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_, 2223 "this statement may fall through"); 2224 else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH) 2225 /* Try to be clever and don't warn when the statement 2226 can't actually fall through. */ 2227 && gimple_stmt_may_fallthru (prev) 2228 && prevloc != UNKNOWN_LOCATION) 2229 warned_p = warning_at (prevloc, 2230 OPT_Wimplicit_fallthrough_, 2231 "this statement may fall through"); 2232 if (warned_p) 2233 inform (gimple_location (next), "here"); 2234 2235 /* Mark this label as processed so as to prevent multiple 2236 warnings in nested switches. */ 2237 FALLTHROUGH_LABEL_P (label) = true; 2238 2239 /* So that next warn_implicit_fallthrough_r will start looking for 2240 a new sequence starting with this label. */ 2241 gsi_prev (gsi_p); 2242 } 2243 } 2244 break; 2245 default: 2246 break; 2247 } 2248 return NULL_TREE; 2249 } 2250 2251 /* Warn when a switch case falls through. */ 2252 2253 static void 2254 maybe_warn_implicit_fallthrough (gimple_seq seq) 2255 { 2256 if (!warn_implicit_fallthrough) 2257 return; 2258 2259 /* This warning is meant for C/C++/ObjC/ObjC++ only. */ 2260 if (!(lang_GNU_C () 2261 || lang_GNU_CXX () 2262 || lang_GNU_OBJC ())) 2263 return; 2264 2265 struct walk_stmt_info wi; 2266 memset (&wi, 0, sizeof (wi)); 2267 walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi); 2268 } 2269 2270 /* Callback for walk_gimple_seq. */ 2271 2272 static tree 2273 expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p, 2274 struct walk_stmt_info *wi) 2275 { 2276 gimple *stmt = gsi_stmt (*gsi_p); 2277 2278 *handled_ops_p = true; 2279 switch (gimple_code (stmt)) 2280 { 2281 case GIMPLE_TRY: 2282 case GIMPLE_BIND: 2283 case GIMPLE_CATCH: 2284 case GIMPLE_EH_FILTER: 2285 case GIMPLE_TRANSACTION: 2286 /* Walk the sub-statements. */ 2287 *handled_ops_p = false; 2288 break; 2289 case GIMPLE_CALL: 2290 if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH)) 2291 { 2292 gsi_remove (gsi_p, true); 2293 if (gsi_end_p (*gsi_p)) 2294 { 2295 *static_cast<location_t *>(wi->info) = gimple_location (stmt); 2296 return integer_zero_node; 2297 } 2298 2299 bool found = false; 2300 location_t loc = gimple_location (stmt); 2301 2302 gimple_stmt_iterator gsi2 = *gsi_p; 2303 stmt = gsi_stmt (gsi2); 2304 if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt)) 2305 { 2306 /* Go on until the artificial label. */ 2307 tree goto_dest = gimple_goto_dest (stmt); 2308 for (; !gsi_end_p (gsi2); gsi_next (&gsi2)) 2309 { 2310 if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL 2311 && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2))) 2312 == goto_dest) 2313 break; 2314 } 2315 2316 /* Not found? Stop. */ 2317 if (gsi_end_p (gsi2)) 2318 break; 2319 2320 /* Look one past it. */ 2321 gsi_next (&gsi2); 2322 } 2323 2324 /* We're looking for a case label or default label here. 
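	     Illustratively (example user code, not from this file), a
	     well-formed use that satisfies this search is:

	       case 0:
		 foo ();
		 __attribute__ ((fallthrough));	<-- becomes IFN_FALLTHROUGH
	       case 1:		<-- artificial label with a location: found
		 bar ();

	     anything else between the attribute and the next case draws
	     the warning below.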
*/ 2325 while (!gsi_end_p (gsi2)) 2326 { 2327 stmt = gsi_stmt (gsi2); 2328 if (gimple_code (stmt) == GIMPLE_LABEL) 2329 { 2330 tree label = gimple_label_label (as_a <glabel *> (stmt)); 2331 if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label)) 2332 { 2333 found = true; 2334 break; 2335 } 2336 } 2337 else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK)) 2338 ; 2339 else if (!is_gimple_debug (stmt)) 2340 /* Anything else is not expected. */ 2341 break; 2342 gsi_next (&gsi2); 2343 } 2344 if (!found) 2345 warning_at (loc, 0, "attribute %<fallthrough%> not preceding " 2346 "a case label or default label"); 2347 } 2348 break; 2349 default: 2350 break; 2351 } 2352 return NULL_TREE; 2353 } 2354 2355 /* Expand all FALLTHROUGH () calls in SEQ. */ 2356 2357 static void 2358 expand_FALLTHROUGH (gimple_seq *seq_p) 2359 { 2360 struct walk_stmt_info wi; 2361 location_t loc; 2362 memset (&wi, 0, sizeof (wi)); 2363 wi.info = (void *) &loc; 2364 walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi); 2365 if (wi.callback_result == integer_zero_node) 2366 /* We've found [[fallthrough]]; at the end of a switch, which the C++ 2367 standard says is ill-formed; see [dcl.attr.fallthrough]. */ 2368 warning_at (loc, 0, "attribute %<fallthrough%> not preceding " 2369 "a case label or default label"); 2370 } 2371 2372 2373 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can 2374 branch to. */ 2375 2376 static enum gimplify_status 2377 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p) 2378 { 2379 tree switch_expr = *expr_p; 2380 gimple_seq switch_body_seq = NULL; 2381 enum gimplify_status ret; 2382 tree index_type = TREE_TYPE (switch_expr); 2383 if (index_type == NULL_TREE) 2384 index_type = TREE_TYPE (SWITCH_COND (switch_expr)); 2385 2386 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val, 2387 fb_rvalue); 2388 if (ret == GS_ERROR || ret == GS_UNHANDLED) 2389 return ret; 2390 2391 if (SWITCH_BODY (switch_expr)) 2392 { 2393 vec<tree> labels; 2394 vec<tree> saved_labels; 2395 hash_set<tree> *saved_live_switch_vars = NULL; 2396 tree default_case = NULL_TREE; 2397 gswitch *switch_stmt; 2398 2399 /* Save old labels, get new ones from body, then restore the old 2400 labels. Save all the things from the switch body to append after. */ 2401 saved_labels = gimplify_ctxp->case_labels; 2402 gimplify_ctxp->case_labels.create (8); 2403 2404 /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR. */ 2405 saved_live_switch_vars = gimplify_ctxp->live_switch_vars; 2406 tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr)); 2407 if (body_type == BIND_EXPR || body_type == STATEMENT_LIST) 2408 gimplify_ctxp->live_switch_vars = new hash_set<tree> (4); 2409 else 2410 gimplify_ctxp->live_switch_vars = NULL; 2411 2412 bool old_in_switch_expr = gimplify_ctxp->in_switch_expr; 2413 gimplify_ctxp->in_switch_expr = true; 2414 2415 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq); 2416 2417 gimplify_ctxp->in_switch_expr = old_in_switch_expr; 2418 maybe_warn_switch_unreachable (switch_body_seq); 2419 maybe_warn_implicit_fallthrough (switch_body_seq); 2420 /* Only do this for the outermost GIMPLE_SWITCH. 
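	 For a switch nested inside another switch body, in_switch_expr is
	 still set when the inner gimplify_switch_expr call reaches this
	 point, so the expansion below runs once, over the fully gimplified
	 body of the outermost switch.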
*/ 2421 if (!gimplify_ctxp->in_switch_expr) 2422 expand_FALLTHROUGH (&switch_body_seq); 2423 2424 labels = gimplify_ctxp->case_labels; 2425 gimplify_ctxp->case_labels = saved_labels; 2426 2427 if (gimplify_ctxp->live_switch_vars) 2428 { 2429 gcc_assert (gimplify_ctxp->live_switch_vars->elements () == 0); 2430 delete gimplify_ctxp->live_switch_vars; 2431 } 2432 gimplify_ctxp->live_switch_vars = saved_live_switch_vars; 2433 2434 preprocess_case_label_vec_for_gimple (labels, index_type, 2435 &default_case); 2436 2437 bool add_bind = false; 2438 if (!default_case) 2439 { 2440 glabel *new_default; 2441 2442 default_case 2443 = build_case_label (NULL_TREE, NULL_TREE, 2444 create_artificial_label (UNKNOWN_LOCATION)); 2445 if (old_in_switch_expr) 2446 { 2447 SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case)) = 1; 2448 add_bind = true; 2449 } 2450 new_default = gimple_build_label (CASE_LABEL (default_case)); 2451 gimplify_seq_add_stmt (&switch_body_seq, new_default); 2452 } 2453 else if (old_in_switch_expr) 2454 { 2455 gimple *last = gimple_seq_last_stmt (switch_body_seq); 2456 if (last && gimple_code (last) == GIMPLE_LABEL) 2457 { 2458 tree label = gimple_label_label (as_a <glabel *> (last)); 2459 if (SWITCH_BREAK_LABEL_P (label)) 2460 add_bind = true; 2461 } 2462 } 2463 2464 switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr), 2465 default_case, labels); 2466 /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq 2467 ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL, 2468 wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND, 2469 so that we can easily find the start and end of the switch 2470 statement. */ 2471 if (add_bind) 2472 { 2473 gimple_seq bind_body = NULL; 2474 gimplify_seq_add_stmt (&bind_body, switch_stmt); 2475 gimple_seq_add_seq (&bind_body, switch_body_seq); 2476 gbind *bind = gimple_build_bind (NULL_TREE, bind_body, NULL_TREE); 2477 gimple_set_location (bind, EXPR_LOCATION (switch_expr)); 2478 gimplify_seq_add_stmt (pre_p, bind); 2479 } 2480 else 2481 { 2482 gimplify_seq_add_stmt (pre_p, switch_stmt); 2483 gimplify_seq_add_seq (pre_p, switch_body_seq); 2484 } 2485 labels.release (); 2486 } 2487 else 2488 gcc_unreachable (); 2489 2490 return GS_ALL_DONE; 2491 } 2492 2493 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */ 2494 2495 static enum gimplify_status 2496 gimplify_label_expr (tree *expr_p, gimple_seq *pre_p) 2497 { 2498 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p)) 2499 == current_function_decl); 2500 2501 tree label = LABEL_EXPR_LABEL (*expr_p); 2502 glabel *label_stmt = gimple_build_label (label); 2503 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p)); 2504 gimplify_seq_add_stmt (pre_p, label_stmt); 2505 2506 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label))) 2507 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL, 2508 NOT_TAKEN)); 2509 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label))) 2510 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL, 2511 TAKEN)); 2512 2513 return GS_ALL_DONE; 2514 } 2515 2516 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */ 2517 2518 static enum gimplify_status 2519 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p) 2520 { 2521 struct gimplify_ctx *ctxp; 2522 glabel *label_stmt; 2523 2524 /* Invalid programs can play Duff's Device type games with, for example, 2525 #pragma omp parallel. 
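     (Illustratively: a case label placed textually inside a structured
     block such as a #pragma omp parallel body while the switch itself
     sits outside that block.)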
At least in the C front end, we don't 2526 detect such invalid branches until after gimplification, in the 2527 diagnose_omp_blocks pass. */ 2528 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context) 2529 if (ctxp->case_labels.exists ()) 2530 break; 2531 2532 tree label = CASE_LABEL (*expr_p); 2533 label_stmt = gimple_build_label (label); 2534 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p)); 2535 ctxp->case_labels.safe_push (*expr_p); 2536 gimplify_seq_add_stmt (pre_p, label_stmt); 2537 2538 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label))) 2539 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL, 2540 NOT_TAKEN)); 2541 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label))) 2542 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL, 2543 TAKEN)); 2544 2545 return GS_ALL_DONE; 2546 } 2547 2548 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first 2549 if necessary. */ 2550 2551 tree 2552 build_and_jump (tree *label_p) 2553 { 2554 if (label_p == NULL) 2555 /* If there's nowhere to jump, just fall through. */ 2556 return NULL_TREE; 2557 2558 if (*label_p == NULL_TREE) 2559 { 2560 tree label = create_artificial_label (UNKNOWN_LOCATION); 2561 *label_p = label; 2562 } 2563 2564 return build1 (GOTO_EXPR, void_type_node, *label_p); 2565 } 2566 2567 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR. 2568 This also involves building a label to jump to and communicating it to 2569 gimplify_loop_expr through gimplify_ctxp->exit_label. */ 2570 2571 static enum gimplify_status 2572 gimplify_exit_expr (tree *expr_p) 2573 { 2574 tree cond = TREE_OPERAND (*expr_p, 0); 2575 tree expr; 2576 2577 expr = build_and_jump (&gimplify_ctxp->exit_label); 2578 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE); 2579 *expr_p = expr; 2580 2581 return GS_OK; 2582 } 2583 2584 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is 2585 different from its canonical type, wrap the whole thing inside a 2586 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical 2587 type. 2588 2589 The canonical type of a COMPONENT_REF is the type of the field being 2590 referenced--unless the field is a bit-field which can be read directly 2591 in a smaller mode, in which case the canonical type is the 2592 sign-appropriate type corresponding to that mode. */ 2593 2594 static void 2595 canonicalize_component_ref (tree *expr_p) 2596 { 2597 tree expr = *expr_p; 2598 tree type; 2599 2600 gcc_assert (TREE_CODE (expr) == COMPONENT_REF); 2601 2602 if (INTEGRAL_TYPE_P (TREE_TYPE (expr))) 2603 type = TREE_TYPE (get_unwidened (expr, NULL_TREE)); 2604 else 2605 type = TREE_TYPE (TREE_OPERAND (expr, 1)); 2606 2607 /* One could argue that all the stuff below is not necessary for 2608 the non-bitfield case and declare it a FE error if type 2609 adjustment would be needed. */ 2610 if (TREE_TYPE (expr) != type) 2611 { 2612 #ifdef ENABLE_TYPES_CHECKING 2613 tree old_type = TREE_TYPE (expr); 2614 #endif 2615 int type_quals; 2616 2617 /* We need to preserve qualifiers and propagate them from 2618 operand 0. */ 2619 type_quals = TYPE_QUALS (type) 2620 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0))); 2621 if (TYPE_QUALS (type) != type_quals) 2622 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals); 2623 2624 /* Set the type of the COMPONENT_REF to the underlying type. 
*/ 2625 TREE_TYPE (expr) = type; 2626 2627 #ifdef ENABLE_TYPES_CHECKING 2628 /* It is now a FE error, if the conversion from the canonical 2629 type to the original expression type is not useless. */ 2630 gcc_assert (useless_type_conversion_p (old_type, type)); 2631 #endif 2632 } 2633 } 2634 2635 /* If a NOP conversion is changing a pointer to array of foo to a pointer 2636 to foo, embed that change in the ADDR_EXPR by converting 2637 T array[U]; 2638 (T *)&array 2639 ==> 2640 &array[L] 2641 where L is the lower bound. For simplicity, only do this for constant 2642 lower bound. 2643 The constraint is that the type of &array[L] is trivially convertible 2644 to T *. */ 2645 2646 static void 2647 canonicalize_addr_expr (tree *expr_p) 2648 { 2649 tree expr = *expr_p; 2650 tree addr_expr = TREE_OPERAND (expr, 0); 2651 tree datype, ddatype, pddatype; 2652 2653 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */ 2654 if (!POINTER_TYPE_P (TREE_TYPE (expr)) 2655 || TREE_CODE (addr_expr) != ADDR_EXPR) 2656 return; 2657 2658 /* The addr_expr type should be a pointer to an array. */ 2659 datype = TREE_TYPE (TREE_TYPE (addr_expr)); 2660 if (TREE_CODE (datype) != ARRAY_TYPE) 2661 return; 2662 2663 /* The pointer to element type shall be trivially convertible to 2664 the expression pointer type. */ 2665 ddatype = TREE_TYPE (datype); 2666 pddatype = build_pointer_type (ddatype); 2667 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)), 2668 pddatype)) 2669 return; 2670 2671 /* The lower bound and element sizes must be constant. */ 2672 if (!TYPE_SIZE_UNIT (ddatype) 2673 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST 2674 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype)) 2675 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST) 2676 return; 2677 2678 /* All checks succeeded. Build a new node to merge the cast. */ 2679 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0), 2680 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)), 2681 NULL_TREE, NULL_TREE); 2682 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p); 2683 2684 /* We can have stripped a required restrict qualifier above. */ 2685 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p))) 2686 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p); 2687 } 2688 2689 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions 2690 underneath as appropriate. */ 2691 2692 static enum gimplify_status 2693 gimplify_conversion (tree *expr_p) 2694 { 2695 location_t loc = EXPR_LOCATION (*expr_p); 2696 gcc_assert (CONVERT_EXPR_P (*expr_p)); 2697 2698 /* Then strip away all but the outermost conversion. */ 2699 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0)); 2700 2701 /* And remove the outermost conversion if it's useless. */ 2702 if (tree_ssa_useless_type_conversion (*expr_p)) 2703 *expr_p = TREE_OPERAND (*expr_p, 0); 2704 2705 /* If we still have a conversion at the toplevel, 2706 then canonicalize some constructs. */ 2707 if (CONVERT_EXPR_P (*expr_p)) 2708 { 2709 tree sub = TREE_OPERAND (*expr_p, 0); 2710 2711 /* If a NOP conversion is changing the type of a COMPONENT_REF 2712 expression, then canonicalize its type now in order to expose more 2713 redundant conversions. */ 2714 if (TREE_CODE (sub) == COMPONENT_REF) 2715 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0)); 2716 2717 /* If a NOP conversion is changing a pointer to array of foo 2718 to a pointer to foo, embed that change in the ADDR_EXPR. 
*/ 2719 else if (TREE_CODE (sub) == ADDR_EXPR) 2720 canonicalize_addr_expr (expr_p); 2721 } 2722 2723 /* If we have a conversion to a non-register type force the 2724 use of a VIEW_CONVERT_EXPR instead. */ 2725 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p))) 2726 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p), 2727 TREE_OPERAND (*expr_p, 0)); 2728 2729 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */ 2730 if (TREE_CODE (*expr_p) == CONVERT_EXPR) 2731 TREE_SET_CODE (*expr_p, NOP_EXPR); 2732 2733 return GS_OK; 2734 } 2735 2736 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a 2737 DECL_VALUE_EXPR, and it's worth re-examining things. */ 2738 2739 static enum gimplify_status 2740 gimplify_var_or_parm_decl (tree *expr_p) 2741 { 2742 tree decl = *expr_p; 2743 2744 /* ??? If this is a local variable, and it has not been seen in any 2745 outer BIND_EXPR, then it's probably the result of a duplicate 2746 declaration, for which we've already issued an error. It would 2747 be really nice if the front end wouldn't leak these at all. 2748 Currently the only known culprit is C++ destructors, as seen 2749 in g++.old-deja/g++.jason/binding.C. */ 2750 if (VAR_P (decl) 2751 && !DECL_SEEN_IN_BIND_EXPR_P (decl) 2752 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl) 2753 && decl_function_context (decl) == current_function_decl) 2754 { 2755 gcc_assert (seen_error ()); 2756 return GS_ERROR; 2757 } 2758 2759 /* When within an OMP context, notice uses of variables. */ 2760 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true)) 2761 return GS_ALL_DONE; 2762 2763 /* If the decl is an alias for another expression, substitute it now. */ 2764 if (DECL_HAS_VALUE_EXPR_P (decl)) 2765 { 2766 *expr_p = unshare_expr (DECL_VALUE_EXPR (decl)); 2767 return GS_OK; 2768 } 2769 2770 return GS_ALL_DONE; 2771 } 2772 2773 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */ 2774 2775 static void 2776 recalculate_side_effects (tree t) 2777 { 2778 enum tree_code code = TREE_CODE (t); 2779 int len = TREE_OPERAND_LENGTH (t); 2780 int i; 2781 2782 switch (TREE_CODE_CLASS (code)) 2783 { 2784 case tcc_expression: 2785 switch (code) 2786 { 2787 case INIT_EXPR: 2788 case MODIFY_EXPR: 2789 case VA_ARG_EXPR: 2790 case PREDECREMENT_EXPR: 2791 case PREINCREMENT_EXPR: 2792 case POSTDECREMENT_EXPR: 2793 case POSTINCREMENT_EXPR: 2794 /* All of these have side-effects, no matter what their 2795 operands are. */ 2796 return; 2797 2798 default: 2799 break; 2800 } 2801 /* Fall through. */ 2802 2803 case tcc_comparison: /* a comparison expression */ 2804 case tcc_unary: /* a unary arithmetic expression */ 2805 case tcc_binary: /* a binary arithmetic expression */ 2806 case tcc_reference: /* a reference */ 2807 case tcc_vl_exp: /* a function call */ 2808 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t); 2809 for (i = 0; i < len; ++i) 2810 { 2811 tree op = TREE_OPERAND (t, i); 2812 if (op && TREE_SIDE_EFFECTS (op)) 2813 TREE_SIDE_EFFECTS (t) = 1; 2814 } 2815 break; 2816 2817 case tcc_constant: 2818 /* No side-effects. */ 2819 return; 2820 2821 default: 2822 gcc_unreachable (); 2823 } 2824 } 2825 2826 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR 2827 node *EXPR_P. 2828 2829 compound_lval 2830 : min_lval '[' val ']' 2831 | min_lval '.' ID 2832 | compound_lval '[' val ']' 2833 | compound_lval '.' 
     ID
2834 
2835    This is not part of the original SIMPLE definition, which separates
2836    array and member references, but it seems reasonable to handle them
2837    together. Also, this way we don't run into problems with union
2838    aliasing; gcc requires that for accesses through a union to alias, the
2839    union reference must be explicit, which was not always the case when we
2840    were splitting up array and member refs.
2841 
2842    PRE_P points to the sequence where side effects that must happen before
2843      *EXPR_P should be stored.
2844 
2845    POST_P points to the sequence where side effects that must happen after
2846      *EXPR_P should be stored. */
2847 
2848 static enum gimplify_status
2849 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2850 			fallback_t fallback)
2851 {
2852   tree *p;
2853   enum gimplify_status ret = GS_ALL_DONE, tret;
2854   int i;
2855   location_t loc = EXPR_LOCATION (*expr_p);
2856   tree expr = *expr_p;
2857 
2858   /* Create a stack of the subexpressions so later we can walk them in
2859      order from inner to outer. */
2860   auto_vec<tree, 10> expr_stack;
2861 
2862   /* We can handle anything that get_inner_reference can deal with. */
2863   for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
2864     {
2865     restart:
2866       /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
2867       if (TREE_CODE (*p) == INDIRECT_REF)
2868 	*p = fold_indirect_ref_loc (loc, *p);
2869 
2870       if (handled_component_p (*p))
2871 	;
2872       /* Expand DECL_VALUE_EXPR now. In some cases that may expose
2873 	 additional COMPONENT_REFs. */
2874       else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
2875 	       && gimplify_var_or_parm_decl (p) == GS_OK)
2876 	goto restart;
2877       else
2878 	break;
2879 
2880       expr_stack.safe_push (*p);
2881     }
2882 
2883   gcc_assert (expr_stack.length ());
2884 
2885   /* Now EXPR_STACK is a stack of pointers to all the refs we've
2886      walked through and P points to the innermost expression.
2887 
2888      Java requires that we elaborate nodes in source order. That
2889      means we must gimplify the inner expression followed by each of
2890      the indices, in order. But we can't gimplify the inner
2891      expression until we deal with any variable bounds, sizes, or
2892      positions in order to deal with PLACEHOLDER_EXPRs.
2893 
2894      So we do this in three steps. First we deal with the annotations
2895      for any variables in the components, then we gimplify the base,
2896      then we gimplify any indices, from left to right. */
2897   for (i = expr_stack.length () - 1; i >= 0; i--)
2898     {
2899       tree t = expr_stack[i];
2900 
2901       if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2902 	{
2903 	  /* Gimplify the low bound and element type size and put them into
2904 	     the ARRAY_REF. If these values are set, they have already been
2905 	     gimplified.
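	     Illustratively (a sketch, not literal dump output): for
	     "int a[n][m]" the element size of the outer ARRAY_REF in
	     "a[i][j]" depends on the run-time value of m, so it is
	     gimplified into a temporary and stored in operand 3 below,
	     whereas for a fixed-size array both operands stay NULL and
	     the constants are recomputed on demand.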
*/ 2906 if (TREE_OPERAND (t, 2) == NULL_TREE) 2907 { 2908 tree low = unshare_expr (array_ref_low_bound (t)); 2909 if (!is_gimple_min_invariant (low)) 2910 { 2911 TREE_OPERAND (t, 2) = low; 2912 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, 2913 post_p, is_gimple_reg, 2914 fb_rvalue); 2915 ret = MIN (ret, tret); 2916 } 2917 } 2918 else 2919 { 2920 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p, 2921 is_gimple_reg, fb_rvalue); 2922 ret = MIN (ret, tret); 2923 } 2924 2925 if (TREE_OPERAND (t, 3) == NULL_TREE) 2926 { 2927 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0))); 2928 tree elmt_size = unshare_expr (array_ref_element_size (t)); 2929 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type)); 2930 2931 /* Divide the element size by the alignment of the element 2932 type (above). */ 2933 elmt_size 2934 = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor); 2935 2936 if (!is_gimple_min_invariant (elmt_size)) 2937 { 2938 TREE_OPERAND (t, 3) = elmt_size; 2939 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, 2940 post_p, is_gimple_reg, 2941 fb_rvalue); 2942 ret = MIN (ret, tret); 2943 } 2944 } 2945 else 2946 { 2947 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p, 2948 is_gimple_reg, fb_rvalue); 2949 ret = MIN (ret, tret); 2950 } 2951 } 2952 else if (TREE_CODE (t) == COMPONENT_REF) 2953 { 2954 /* Set the field offset into T and gimplify it. */ 2955 if (TREE_OPERAND (t, 2) == NULL_TREE) 2956 { 2957 tree offset = unshare_expr (component_ref_field_offset (t)); 2958 tree field = TREE_OPERAND (t, 1); 2959 tree factor 2960 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT); 2961 2962 /* Divide the offset by its alignment. */ 2963 offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor); 2964 2965 if (!is_gimple_min_invariant (offset)) 2966 { 2967 TREE_OPERAND (t, 2) = offset; 2968 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, 2969 post_p, is_gimple_reg, 2970 fb_rvalue); 2971 ret = MIN (ret, tret); 2972 } 2973 } 2974 else 2975 { 2976 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p, 2977 is_gimple_reg, fb_rvalue); 2978 ret = MIN (ret, tret); 2979 } 2980 } 2981 } 2982 2983 /* Step 2 is to gimplify the base expression. Make sure lvalue is set 2984 so as to match the min_lval predicate. Failure to do so may result 2985 in the creation of large aggregate temporaries. */ 2986 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval, 2987 fallback | fb_lvalue); 2988 ret = MIN (ret, tret); 2989 2990 /* And finally, the indices and operands of ARRAY_REF. During this 2991 loop we also remove any useless conversions. */ 2992 for (; expr_stack.length () > 0; ) 2993 { 2994 tree t = expr_stack.pop (); 2995 2996 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF) 2997 { 2998 /* Gimplify the dimension. */ 2999 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1))) 3000 { 3001 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p, 3002 is_gimple_val, fb_rvalue); 3003 ret = MIN (ret, tret); 3004 } 3005 } 3006 3007 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0)); 3008 3009 /* The innermost expression P may have originally had 3010 TREE_SIDE_EFFECTS set which would have caused all the outer 3011 expressions in *EXPR_P leading to P to also have had 3012 TREE_SIDE_EFFECTS set. */ 3013 recalculate_side_effects (t); 3014 } 3015 3016 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. 
*/
3017   if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
3018     {
3019       canonicalize_component_ref (expr_p);
3020     }
3021 
3022   expr_stack.release ();
3023 
3024   gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
3025 
3026   return ret;
3027 }
3028 
3029 /* Gimplify the self-modifying expression pointed to by EXPR_P
3030    (++, --, +=, -=).
3031 
3032    PRE_P points to the list where side effects that must happen before
3033      *EXPR_P should be stored.
3034 
3035    POST_P points to the list where side effects that must happen after
3036      *EXPR_P should be stored.
3037 
3038    WANT_VALUE is nonzero iff we want to use the value of this expression
3039      in another expression.
3040 
3041    ARITH_TYPE is the type the computation should be performed in. */
3042 
3043 enum gimplify_status
3044 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3045 			bool want_value, tree arith_type)
3046 {
3047   enum tree_code code;
3048   tree lhs, lvalue, rhs, t1;
3049   gimple_seq post = NULL, *orig_post_p = post_p;
3050   bool postfix;
3051   enum tree_code arith_code;
3052   enum gimplify_status ret;
3053   location_t loc = EXPR_LOCATION (*expr_p);
3054 
3055   code = TREE_CODE (*expr_p);
3056 
3057   gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
3058 	      || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
3059 
3060   /* Prefix or postfix? */
3061   if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
3062     /* Faster to treat as prefix if result is not used. */
3063     postfix = want_value;
3064   else
3065     postfix = false;
3066 
3067   /* For postfix, make sure the inner expression's post side effects
3068      are executed after side effects from this expression. */
3069   if (postfix)
3070     post_p = &post;
3071 
3072   /* Add or subtract? */
3073   if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
3074     arith_code = PLUS_EXPR;
3075   else
3076     arith_code = MINUS_EXPR;
3077 
3078   /* Gimplify the LHS into a GIMPLE lvalue. */
3079   lvalue = TREE_OPERAND (*expr_p, 0);
3080   ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
3081   if (ret == GS_ERROR)
3082     return ret;
3083 
3084   /* Extract the operands to the arithmetic operation. */
3085   lhs = lvalue;
3086   rhs = TREE_OPERAND (*expr_p, 1);
3087 
3088   /* For a postfix operator, we evaluate the LHS to an rvalue and then use
3089      that as the result value and in the postqueue operation. */
3090   if (postfix)
3091     {
3092       ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
3093       if (ret == GS_ERROR)
3094 	return ret;
3095 
3096       lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
3097     }
3098 
3099   /* For pointer increment and decrement, use POINTER_PLUS_EXPR. */
3100   if (POINTER_TYPE_P (TREE_TYPE (lhs)))
3101     {
3102       rhs = convert_to_ptrofftype_loc (loc, rhs);
3103       if (arith_code == MINUS_EXPR)
3104 	rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
3105       t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
3106     }
3107   else
3108     t1 = fold_convert (TREE_TYPE (*expr_p),
3109 		       fold_build2 (arith_code, arith_type,
3110 				    fold_convert (arith_type, lhs),
3111 				    fold_convert (arith_type, rhs)));
3112 
3113   if (postfix)
3114     {
3115       gimplify_assign (lvalue, t1, pre_p);
3116       gimplify_seq_add_seq (orig_post_p, post);
3117       *expr_p = lhs;
3118       return GS_ALL_DONE;
3119     }
3120   else
3121     {
3122       *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
3123       return GS_OK;
3124     }
3125 }
3126 
3127 /* If *EXPR_P has a variable-sized type, wrap it in a WITH_SIZE_EXPR.
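   Illustratively (a sketch, not code from this file): for an object
   whose type's size is a run-time value, e.g. a GNU C variable-length
   object, the expression EXPR becomes WITH_SIZE_EXPR <EXPR, size>,
   with size the gimplified TYPE_SIZE_UNIT of the type, so later passes
   still know how many bytes the object occupies.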
*/ 3128 3129 static void 3130 maybe_with_size_expr (tree *expr_p) 3131 { 3132 tree expr = *expr_p; 3133 tree type = TREE_TYPE (expr); 3134 tree size; 3135 3136 /* If we've already wrapped this or the type is error_mark_node, we can't do 3137 anything. */ 3138 if (TREE_CODE (expr) == WITH_SIZE_EXPR 3139 || type == error_mark_node) 3140 return; 3141 3142 /* If the size isn't known or is a constant, we have nothing to do. */ 3143 size = TYPE_SIZE_UNIT (type); 3144 if (!size || poly_int_tree_p (size)) 3145 return; 3146 3147 /* Otherwise, make a WITH_SIZE_EXPR. */ 3148 size = unshare_expr (size); 3149 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr); 3150 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size); 3151 } 3152 3153 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P 3154 Store any side-effects in PRE_P. CALL_LOCATION is the location of 3155 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be 3156 gimplified to an SSA name. */ 3157 3158 enum gimplify_status 3159 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location, 3160 bool allow_ssa) 3161 { 3162 bool (*test) (tree); 3163 fallback_t fb; 3164 3165 /* In general, we allow lvalues for function arguments to avoid 3166 extra overhead of copying large aggregates out of even larger 3167 aggregates into temporaries only to copy the temporaries to 3168 the argument list. Make optimizers happy by pulling out to 3169 temporaries those types that fit in registers. */ 3170 if (is_gimple_reg_type (TREE_TYPE (*arg_p))) 3171 test = is_gimple_val, fb = fb_rvalue; 3172 else 3173 { 3174 test = is_gimple_lvalue, fb = fb_either; 3175 /* Also strip a TARGET_EXPR that would force an extra copy. */ 3176 if (TREE_CODE (*arg_p) == TARGET_EXPR) 3177 { 3178 tree init = TARGET_EXPR_INITIAL (*arg_p); 3179 if (init 3180 && !VOID_TYPE_P (TREE_TYPE (init))) 3181 *arg_p = init; 3182 } 3183 } 3184 3185 /* If this is a variable sized type, we must remember the size. */ 3186 maybe_with_size_expr (arg_p); 3187 3188 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */ 3189 /* Make sure arguments have the same location as the function call 3190 itself. */ 3191 protected_set_expr_location (*arg_p, call_location); 3192 3193 /* There is a sequence point before a function call. Side effects in 3194 the argument list must occur before the actual call. So, when 3195 gimplifying arguments, force gimplify_expr to use an internal 3196 post queue which is then appended to the end of PRE_P. */ 3197 return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa); 3198 } 3199 3200 /* Don't fold inside offloading or taskreg regions: it can break code by 3201 adding decl references that weren't in the source. We'll do it during 3202 omplower pass instead. */ 3203 3204 static bool 3205 maybe_fold_stmt (gimple_stmt_iterator *gsi) 3206 { 3207 struct gimplify_omp_ctx *ctx; 3208 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context) 3209 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0) 3210 return false; 3211 else if ((ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS) 3212 return false; 3213 /* Delay folding of builtins until the IL is in consistent state 3214 so the diagnostic machinery can do a better job. */ 3215 if (gimple_call_builtin_p (gsi_stmt (*gsi))) 3216 return false; 3217 return fold_stmt (gsi); 3218 } 3219 3220 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P. 3221 WANT_VALUE is true if the result of the call is desired. 
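   Illustratively (example input and output, not literal dump text):

     x = foo (a + b, bar ());

   gimplifies roughly to

     t1 = a + b;
     t2 = bar ();
     x = foo (t1, t2);

   with each argument reduced to a GIMPLE value by gimplify_arg.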
*/ 3222 3223 static enum gimplify_status 3224 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value) 3225 { 3226 tree fndecl, parms, p, fnptrtype; 3227 enum gimplify_status ret; 3228 int i, nargs; 3229 gcall *call; 3230 bool builtin_va_start_p = false; 3231 location_t loc = EXPR_LOCATION (*expr_p); 3232 3233 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR); 3234 3235 /* For reliable diagnostics during inlining, it is necessary that 3236 every call_expr be annotated with file and line. */ 3237 if (! EXPR_HAS_LOCATION (*expr_p)) 3238 SET_EXPR_LOCATION (*expr_p, input_location); 3239 3240 /* Gimplify internal functions created in the FEs. */ 3241 if (CALL_EXPR_FN (*expr_p) == NULL_TREE) 3242 { 3243 if (want_value) 3244 return GS_ALL_DONE; 3245 3246 nargs = call_expr_nargs (*expr_p); 3247 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p); 3248 auto_vec<tree> vargs (nargs); 3249 3250 for (i = 0; i < nargs; i++) 3251 { 3252 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, 3253 EXPR_LOCATION (*expr_p)); 3254 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i)); 3255 } 3256 3257 gcall *call = gimple_build_call_internal_vec (ifn, vargs); 3258 gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p)); 3259 gimplify_seq_add_stmt (pre_p, call); 3260 return GS_ALL_DONE; 3261 } 3262 3263 /* This may be a call to a builtin function. 3264 3265 Builtin function calls may be transformed into different 3266 (and more efficient) builtin function calls under certain 3267 circumstances. Unfortunately, gimplification can muck things 3268 up enough that the builtin expanders are not aware that certain 3269 transformations are still valid. 3270 3271 So we attempt transformation/gimplification of the call before 3272 we gimplify the CALL_EXPR. At this time we do not manage to 3273 transform all calls in the same manner as the expanders do, but 3274 we do transform most of them. */ 3275 fndecl = get_callee_fndecl (*expr_p); 3276 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL)) 3277 switch (DECL_FUNCTION_CODE (fndecl)) 3278 { 3279 CASE_BUILT_IN_ALLOCA: 3280 /* If the call has been built for a variable-sized object, then we 3281 want to restore the stack level when the enclosing BIND_EXPR is 3282 exited to reclaim the allocated space; otherwise, we precisely 3283 need to do the opposite and preserve the latest stack level. */ 3284 if (CALL_ALLOCA_FOR_VAR_P (*expr_p)) 3285 gimplify_ctxp->save_stack = true; 3286 else 3287 gimplify_ctxp->keep_stack = true; 3288 break; 3289 3290 case BUILT_IN_VA_START: 3291 { 3292 builtin_va_start_p = TRUE; 3293 if (call_expr_nargs (*expr_p) < 2) 3294 { 3295 error ("too few arguments to function %<va_start%>"); 3296 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p)); 3297 return GS_OK; 3298 } 3299 3300 if (fold_builtin_next_arg (*expr_p, true)) 3301 { 3302 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p)); 3303 return GS_OK; 3304 } 3305 break; 3306 } 3307 3308 default: 3309 ; 3310 } 3311 if (fndecl && fndecl_built_in_p (fndecl)) 3312 { 3313 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value); 3314 if (new_tree && new_tree != *expr_p) 3315 { 3316 /* There was a transformation of this call which computes the 3317 same value, but in a more efficient way. Return and try 3318 again. */ 3319 *expr_p = new_tree; 3320 return GS_OK; 3321 } 3322 } 3323 3324 /* Remember the original function pointer type. 
*/ 3325 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p)); 3326 3327 /* There is a sequence point before the call, so any side effects in 3328 the calling expression must occur before the actual call. Force 3329 gimplify_expr to use an internal post queue. */ 3330 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL, 3331 is_gimple_call_addr, fb_rvalue); 3332 3333 nargs = call_expr_nargs (*expr_p); 3334 3335 /* Get argument types for verification. */ 3336 fndecl = get_callee_fndecl (*expr_p); 3337 parms = NULL_TREE; 3338 if (fndecl) 3339 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl)); 3340 else 3341 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype)); 3342 3343 if (fndecl && DECL_ARGUMENTS (fndecl)) 3344 p = DECL_ARGUMENTS (fndecl); 3345 else if (parms) 3346 p = parms; 3347 else 3348 p = NULL_TREE; 3349 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p)) 3350 ; 3351 3352 /* If the last argument is __builtin_va_arg_pack () and it is not 3353 passed as a named argument, decrease the number of CALL_EXPR 3354 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */ 3355 if (!p 3356 && i < nargs 3357 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR) 3358 { 3359 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1); 3360 tree last_arg_fndecl = get_callee_fndecl (last_arg); 3361 3362 if (last_arg_fndecl 3363 && fndecl_built_in_p (last_arg_fndecl, BUILT_IN_VA_ARG_PACK)) 3364 { 3365 tree call = *expr_p; 3366 3367 --nargs; 3368 *expr_p = build_call_array_loc (loc, TREE_TYPE (call), 3369 CALL_EXPR_FN (call), 3370 nargs, CALL_EXPR_ARGP (call)); 3371 3372 /* Copy all CALL_EXPR flags, location and block, except 3373 CALL_EXPR_VA_ARG_PACK flag. */ 3374 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call); 3375 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call); 3376 CALL_EXPR_RETURN_SLOT_OPT (*expr_p) 3377 = CALL_EXPR_RETURN_SLOT_OPT (call); 3378 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call); 3379 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call)); 3380 3381 /* Set CALL_EXPR_VA_ARG_PACK. */ 3382 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1; 3383 } 3384 } 3385 3386 /* If the call returns twice then after building the CFG the call 3387 argument computations will no longer dominate the call because 3388 we add an abnormal incoming edge to the call. So do not use SSA 3389 vars there. */ 3390 bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE; 3391 3392 /* Gimplify the function arguments. */ 3393 if (nargs > 0) 3394 { 3395 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0); 3396 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs; 3397 PUSH_ARGS_REVERSED ? i-- : i++) 3398 { 3399 enum gimplify_status t; 3400 3401 /* Avoid gimplifying the second argument to va_start, which needs to 3402 be the plain PARM_DECL. */ 3403 if ((i != 1) || !builtin_va_start_p) 3404 { 3405 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, 3406 EXPR_LOCATION (*expr_p), ! returns_twice); 3407 3408 if (t == GS_ERROR) 3409 ret = GS_ERROR; 3410 } 3411 } 3412 } 3413 3414 /* Gimplify the static chain. */ 3415 if (CALL_EXPR_STATIC_CHAIN (*expr_p)) 3416 { 3417 if (fndecl && !DECL_STATIC_CHAIN (fndecl)) 3418 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL; 3419 else 3420 { 3421 enum gimplify_status t; 3422 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p, 3423 EXPR_LOCATION (*expr_p), ! returns_twice); 3424 if (t == GS_ERROR) 3425 ret = GS_ERROR; 3426 } 3427 } 3428 3429 /* Verify the function result. 
*/ 3430 if (want_value && fndecl 3431 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype)))) 3432 { 3433 error_at (loc, "using result of function returning %<void%>"); 3434 ret = GS_ERROR; 3435 } 3436 3437 /* Try this again in case gimplification exposed something. */ 3438 if (ret != GS_ERROR) 3439 { 3440 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value); 3441 3442 if (new_tree && new_tree != *expr_p) 3443 { 3444 /* There was a transformation of this call which computes the 3445 same value, but in a more efficient way. Return and try 3446 again. */ 3447 *expr_p = new_tree; 3448 return GS_OK; 3449 } 3450 } 3451 else 3452 { 3453 *expr_p = error_mark_node; 3454 return GS_ERROR; 3455 } 3456 3457 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its 3458 decl. This allows us to eliminate redundant or useless 3459 calls to "const" functions. */ 3460 if (TREE_CODE (*expr_p) == CALL_EXPR) 3461 { 3462 int flags = call_expr_flags (*expr_p); 3463 if (flags & (ECF_CONST | ECF_PURE) 3464 /* An infinite loop is considered a side effect. */ 3465 && !(flags & (ECF_LOOPING_CONST_OR_PURE))) 3466 TREE_SIDE_EFFECTS (*expr_p) = 0; 3467 } 3468 3469 /* If the value is not needed by the caller, emit a new GIMPLE_CALL 3470 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified 3471 form and delegate the creation of a GIMPLE_CALL to 3472 gimplify_modify_expr. This is always possible because when 3473 WANT_VALUE is true, the caller wants the result of this call into 3474 a temporary, which means that we will emit an INIT_EXPR in 3475 internal_get_tmp_var which will then be handled by 3476 gimplify_modify_expr. */ 3477 if (!want_value) 3478 { 3479 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we 3480 have to do is replicate it as a GIMPLE_CALL tuple. */ 3481 gimple_stmt_iterator gsi; 3482 call = gimple_build_call_from_tree (*expr_p, fnptrtype); 3483 notice_special_calls (call); 3484 gimplify_seq_add_stmt (pre_p, call); 3485 gsi = gsi_last (*pre_p); 3486 maybe_fold_stmt (&gsi); 3487 *expr_p = NULL_TREE; 3488 } 3489 else 3490 /* Remember the original function type. */ 3491 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype, 3492 CALL_EXPR_FN (*expr_p)); 3493 3494 return ret; 3495 } 3496 3497 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by 3498 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs. 3499 3500 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the 3501 condition is true or false, respectively. If null, we should generate 3502 our own to skip over the evaluation of this specific expression. 3503 3504 LOCUS is the source location of the COND_EXPR. 3505 3506 This function is the tree equivalent of do_jump. 3507 3508 shortcut_cond_r should only be called by shortcut_cond_expr. */ 3509 3510 static tree 3511 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p, 3512 location_t locus) 3513 { 3514 tree local_label = NULL_TREE; 3515 tree t, expr = NULL; 3516 3517 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to 3518 retain the shortcut semantics. Just insert the gotos here; 3519 shortcut_cond_expr will append the real blocks later. */ 3520 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR) 3521 { 3522 location_t new_locus; 3523 3524 /* Turn if (a && b) into 3525 3526 if (a); else goto no; 3527 if (b) goto yes; else goto no; 3528 (no:) */ 3529 3530 if (false_label_p == NULL) 3531 false_label_p = &local_label; 3532 3533 /* Keep the original source location on the first 'if'. 
*/ 3534 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus); 3535 append_to_statement_list (t, &expr); 3536 3537 /* Set the source location of the && on the second 'if'. */ 3538 new_locus = rexpr_location (pred, locus); 3539 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p, 3540 new_locus); 3541 append_to_statement_list (t, &expr); 3542 } 3543 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR) 3544 { 3545 location_t new_locus; 3546 3547 /* Turn if (a || b) into 3548 3549 if (a) goto yes; 3550 if (b) goto yes; else goto no; 3551 (yes:) */ 3552 3553 if (true_label_p == NULL) 3554 true_label_p = &local_label; 3555 3556 /* Keep the original source location on the first 'if'. */ 3557 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus); 3558 append_to_statement_list (t, &expr); 3559 3560 /* Set the source location of the || on the second 'if'. */ 3561 new_locus = rexpr_location (pred, locus); 3562 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p, 3563 new_locus); 3564 append_to_statement_list (t, &expr); 3565 } 3566 else if (TREE_CODE (pred) == COND_EXPR 3567 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1))) 3568 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2)))) 3569 { 3570 location_t new_locus; 3571 3572 /* As long as we're messing with gotos, turn if (a ? b : c) into 3573 if (a) 3574 if (b) goto yes; else goto no; 3575 else 3576 if (c) goto yes; else goto no; 3577 3578 Don't do this if one of the arms has void type, which can happen 3579 in C++ when the arm is throw. */ 3580 3581 /* Keep the original source location on the first 'if'. Set the source 3582 location of the ? on the second 'if'. */ 3583 new_locus = rexpr_location (pred, locus); 3584 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0), 3585 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, 3586 false_label_p, locus), 3587 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p, 3588 false_label_p, new_locus)); 3589 } 3590 else 3591 { 3592 expr = build3 (COND_EXPR, void_type_node, pred, 3593 build_and_jump (true_label_p), 3594 build_and_jump (false_label_p)); 3595 SET_EXPR_LOCATION (expr, locus); 3596 } 3597 3598 if (local_label) 3599 { 3600 t = build1 (LABEL_EXPR, void_type_node, local_label); 3601 append_to_statement_list (t, &expr); 3602 } 3603 3604 return expr; 3605 } 3606 3607 /* If EXPR is a GOTO_EXPR, return it. If it is a STATEMENT_LIST, skip 3608 any of its leading DEBUG_BEGIN_STMTS and recurse on the subsequent 3609 statement, if it is the last one. Otherwise, return NULL. */ 3610 3611 static tree 3612 find_goto (tree expr) 3613 { 3614 if (!expr) 3615 return NULL_TREE; 3616 3617 if (TREE_CODE (expr) == GOTO_EXPR) 3618 return expr; 3619 3620 if (TREE_CODE (expr) != STATEMENT_LIST) 3621 return NULL_TREE; 3622 3623 tree_stmt_iterator i = tsi_start (expr); 3624 3625 while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT) 3626 tsi_next (&i); 3627 3628 if (!tsi_one_before_end_p (i)) 3629 return NULL_TREE; 3630 3631 return find_goto (tsi_stmt (i)); 3632 } 3633 3634 /* Same as find_goto, except that it returns NULL if the destination 3635 is not a LABEL_DECL. 
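   (E.g. a computed goto "goto *ptr;" has an expression rather than a
   LABEL_DECL as its destination and is rejected here.)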
*/ 3636 3637 static inline tree 3638 find_goto_label (tree expr) 3639 { 3640 tree dest = find_goto (expr); 3641 if (dest && TREE_CODE (GOTO_DESTINATION (dest)) == LABEL_DECL) 3642 return dest; 3643 return NULL_TREE; 3644 } 3645 3646 /* Given a conditional expression EXPR with short-circuit boolean 3647 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the 3648 predicate apart into the equivalent sequence of conditionals. */ 3649 3650 static tree 3651 shortcut_cond_expr (tree expr) 3652 { 3653 tree pred = TREE_OPERAND (expr, 0); 3654 tree then_ = TREE_OPERAND (expr, 1); 3655 tree else_ = TREE_OPERAND (expr, 2); 3656 tree true_label, false_label, end_label, t; 3657 tree *true_label_p; 3658 tree *false_label_p; 3659 bool emit_end, emit_false, jump_over_else; 3660 bool then_se = then_ && TREE_SIDE_EFFECTS (then_); 3661 bool else_se = else_ && TREE_SIDE_EFFECTS (else_); 3662 3663 /* First do simple transformations. */ 3664 if (!else_se) 3665 { 3666 /* If there is no 'else', turn 3667 if (a && b) then c 3668 into 3669 if (a) if (b) then c. */ 3670 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR) 3671 { 3672 /* Keep the original source location on the first 'if'. */ 3673 location_t locus = EXPR_LOC_OR_LOC (expr, input_location); 3674 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1); 3675 /* Set the source location of the && on the second 'if'. */ 3676 if (rexpr_has_location (pred)) 3677 SET_EXPR_LOCATION (expr, rexpr_location (pred)); 3678 then_ = shortcut_cond_expr (expr); 3679 then_se = then_ && TREE_SIDE_EFFECTS (then_); 3680 pred = TREE_OPERAND (pred, 0); 3681 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE); 3682 SET_EXPR_LOCATION (expr, locus); 3683 } 3684 } 3685 3686 if (!then_se) 3687 { 3688 /* If there is no 'then', turn 3689 if (a || b); else d 3690 into 3691 if (a); else if (b); else d. */ 3692 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR) 3693 { 3694 /* Keep the original source location on the first 'if'. */ 3695 location_t locus = EXPR_LOC_OR_LOC (expr, input_location); 3696 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1); 3697 /* Set the source location of the || on the second 'if'. */ 3698 if (rexpr_has_location (pred)) 3699 SET_EXPR_LOCATION (expr, rexpr_location (pred)); 3700 else_ = shortcut_cond_expr (expr); 3701 else_se = else_ && TREE_SIDE_EFFECTS (else_); 3702 pred = TREE_OPERAND (pred, 0); 3703 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_); 3704 SET_EXPR_LOCATION (expr, locus); 3705 } 3706 } 3707 3708 /* If we're done, great. */ 3709 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR 3710 && TREE_CODE (pred) != TRUTH_ORIF_EXPR) 3711 return expr; 3712 3713 /* Otherwise we need to mess with gotos. Change 3714 if (a) c; else d; 3715 to 3716 if (a); else goto no; 3717 c; goto end; 3718 no: d; end: 3719 and recursively gimplify the condition. */ 3720 3721 true_label = false_label = end_label = NULL_TREE; 3722 3723 /* If our arms just jump somewhere, hijack those labels so we don't 3724 generate jumps to jumps. */ 3725 3726 if (tree then_goto = find_goto_label (then_)) 3727 { 3728 true_label = GOTO_DESTINATION (then_goto); 3729 then_ = NULL; 3730 then_se = false; 3731 } 3732 3733 if (tree else_goto = find_goto_label (else_)) 3734 { 3735 false_label = GOTO_DESTINATION (else_goto); 3736 else_ = NULL; 3737 else_se = false; 3738 } 3739 3740 /* If we aren't hijacking a label for the 'then' branch, it falls through. 

/* Given a conditional expression EXPR with short-circuit boolean
   predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
   predicate apart into the equivalent sequence of conditionals.  */

static tree
shortcut_cond_expr (tree expr)
{
  tree pred = TREE_OPERAND (expr, 0);
  tree then_ = TREE_OPERAND (expr, 1);
  tree else_ = TREE_OPERAND (expr, 2);
  tree true_label, false_label, end_label, t;
  tree *true_label_p;
  tree *false_label_p;
  bool emit_end, emit_false, jump_over_else;
  bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
  bool else_se = else_ && TREE_SIDE_EFFECTS (else_);

  /* First do simple transformations.  */
  if (!else_se)
    {
      /* If there is no 'else', turn
           if (a && b) then c
         into
           if (a) if (b) then c.  */
      while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
        {
          /* Keep the original source location on the first 'if'.  */
          location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
          TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
          /* Set the source location of the && on the second 'if'.  */
          if (rexpr_has_location (pred))
            SET_EXPR_LOCATION (expr, rexpr_location (pred));
          then_ = shortcut_cond_expr (expr);
          then_se = then_ && TREE_SIDE_EFFECTS (then_);
          pred = TREE_OPERAND (pred, 0);
          expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
          SET_EXPR_LOCATION (expr, locus);
        }
    }

  if (!then_se)
    {
      /* If there is no 'then', turn
           if (a || b); else d
         into
           if (a); else if (b); else d.  */
      while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
        {
          /* Keep the original source location on the first 'if'.  */
          location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
          TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
          /* Set the source location of the || on the second 'if'.  */
          if (rexpr_has_location (pred))
            SET_EXPR_LOCATION (expr, rexpr_location (pred));
          else_ = shortcut_cond_expr (expr);
          else_se = else_ && TREE_SIDE_EFFECTS (else_);
          pred = TREE_OPERAND (pred, 0);
          expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
          SET_EXPR_LOCATION (expr, locus);
        }
    }

  /* If we're done, great.  */
  if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
      && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
    return expr;

  /* Otherwise we need to mess with gotos.  Change
       if (a) c; else d;
     to
       if (a); else goto no;
       c; goto end;
       no: d; end:
     and recursively gimplify the condition.  */

  true_label = false_label = end_label = NULL_TREE;

  /* If our arms just jump somewhere, hijack those labels so we don't
     generate jumps to jumps.  */

  if (tree then_goto = find_goto_label (then_))
    {
      true_label = GOTO_DESTINATION (then_goto);
      then_ = NULL;
      then_se = false;
    }

  if (tree else_goto = find_goto_label (else_))
    {
      false_label = GOTO_DESTINATION (else_goto);
      else_ = NULL;
      else_se = false;
    }

  /* If we aren't hijacking a label for the 'then' branch, it falls
     through.  */
  if (true_label)
    true_label_p = &true_label;
  else
    true_label_p = NULL;

  /* The 'else' branch also needs a label if it contains interesting code.  */
  if (false_label || else_se)
    false_label_p = &false_label;
  else
    false_label_p = NULL;

  /* If there was nothing else in our arms, just forward the label(s).  */
  if (!then_se && !else_se)
    return shortcut_cond_r (pred, true_label_p, false_label_p,
                            EXPR_LOC_OR_LOC (expr, input_location));

  /* If our last subexpression already has a terminal label, reuse it.  */
  if (else_se)
    t = expr_last (else_);
  else if (then_se)
    t = expr_last (then_);
  else
    t = NULL;
  if (t && TREE_CODE (t) == LABEL_EXPR)
    end_label = LABEL_EXPR_LABEL (t);

  /* If we don't care about jumping to the 'else' branch, jump to the end
     if the condition is false.  */
  if (!false_label_p)
    false_label_p = &end_label;

  /* We only want to emit these labels if we aren't hijacking them.  */
  emit_end = (end_label == NULL_TREE);
  emit_false = (false_label == NULL_TREE);

  /* We only emit the jump over the else clause if we have to--if the
     then clause may fall through.  Otherwise we can wind up with a
     useless jump and a useless label at the end of gimplified code,
     which will cause us to think that this conditional as a whole
     falls through even if it doesn't.  If we then inline a function
     which ends with such a condition, that can cause us to issue an
     inappropriate warning about control reaching the end of a
     non-void function.  */
  jump_over_else = block_may_fallthru (then_);

  pred = shortcut_cond_r (pred, true_label_p, false_label_p,
                          EXPR_LOC_OR_LOC (expr, input_location));

  expr = NULL;
  append_to_statement_list (pred, &expr);

  append_to_statement_list (then_, &expr);
  if (else_se)
    {
      if (jump_over_else)
        {
          tree last = expr_last (expr);
          t = build_and_jump (&end_label);
          if (rexpr_has_location (last))
            SET_EXPR_LOCATION (t, rexpr_location (last));
          append_to_statement_list (t, &expr);
        }
      if (emit_false)
        {
          t = build1 (LABEL_EXPR, void_type_node, false_label);
          append_to_statement_list (t, &expr);
        }
      append_to_statement_list (else_, &expr);
    }
  if (emit_end && end_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, end_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
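
/* For illustration only: with side effects in both arms,

     if (a && b) c (); else d ();

   is rewritten by shortcut_cond_expr into approximately

     if (a) ; else goto no;
     if (b) ; else goto no;
     c ();
     goto end;
     no: d ();
     end:

   so that each predicate is tested exactly once on every path.  */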

/* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE.  */

tree
gimple_boolify (tree expr)
{
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);

  if (TREE_CODE (expr) == NE_EXPR
      && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
      && integer_zerop (TREE_OPERAND (expr, 1)))
    {
      tree call = TREE_OPERAND (expr, 0);
      tree fn = get_callee_fndecl (call);

      /* For __builtin_expect ((long) (x), y) recurse into x as well
         if x is truth_value_p.  */
      if (fn
          && fndecl_built_in_p (fn, BUILT_IN_EXPECT)
          && call_expr_nargs (call) == 2)
        {
          tree arg = CALL_EXPR_ARG (call, 0);
          if (arg)
            {
              if (TREE_CODE (arg) == NOP_EXPR
                  && TREE_TYPE (arg) == TREE_TYPE (call))
                arg = TREE_OPERAND (arg, 0);
              if (truth_value_p (TREE_CODE (arg)))
                {
                  arg = gimple_boolify (arg);
                  CALL_EXPR_ARG (call, 0)
                    = fold_convert_loc (loc, TREE_TYPE (call), arg);
                }
            }
        }
    }

  switch (TREE_CODE (expr))
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Also boolify the arguments of truth exprs.  */
      TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
      /* FALLTHRU */

    case TRUTH_NOT_EXPR:
      TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

      /* These expressions always produce boolean results.  */
      if (TREE_CODE (type) != BOOLEAN_TYPE)
        TREE_TYPE (expr) = boolean_type_node;
      return expr;

    case ANNOTATE_EXPR:
      switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
        {
        case annot_expr_ivdep_kind:
        case annot_expr_unroll_kind:
        case annot_expr_no_vector_kind:
        case annot_expr_vector_kind:
        case annot_expr_parallel_kind:
          TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
          if (TREE_CODE (type) != BOOLEAN_TYPE)
            TREE_TYPE (expr) = boolean_type_node;
          return expr;
        default:
          gcc_unreachable ();
        }

    default:
      if (COMPARISON_CLASS_P (expr))
        {
          /* These expressions always produce boolean results.  */
          if (TREE_CODE (type) != BOOLEAN_TYPE)
            TREE_TYPE (expr) = boolean_type_node;
          return expr;
        }
      /* Other expressions that get here must have boolean values, but
         might need to be converted to the appropriate mode.  */
      if (TREE_CODE (type) == BOOLEAN_TYPE)
        return expr;
      return fold_convert_loc (loc, boolean_type_node, expr);
    }
}

/* Given a conditional expression *EXPR_P without side effects, gimplify
   its operands.  New statements are inserted to PRE_P.  */

static enum gimplify_status
gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p, cond;
  enum gimplify_status ret, tret;
  enum tree_code code;

  cond = gimple_boolify (COND_EXPR_COND (expr));

  /* We need to handle && and || specially, as their gimplification
     creates pure cond_expr, thus leading to an infinite cycle otherwise.  */
  code = TREE_CODE (cond);
  if (code == TRUTH_ANDIF_EXPR)
    TREE_SET_CODE (cond, TRUTH_AND_EXPR);
  else if (code == TRUTH_ORIF_EXPR)
    TREE_SET_CODE (cond, TRUTH_OR_EXPR);
  ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
  COND_EXPR_COND (*expr_p) = cond;

  tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
                        is_gimple_val, fb_rvalue);
  ret = MIN (ret, tret);
  tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
                        is_gimple_val, fb_rvalue);

  return MIN (ret, tret);
}
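
/* An illustrative example: when the context allows a COND_EXPR on the
   RHS (see allow_rhs_cond_expr), an assignment such as

     x = p ? a + 1 : 0;

   whose arms have no side effects and cannot trap is kept roughly as a
   single assignment with a conditional RHS, rather than being expanded
   into branches.  */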

/* Return true if evaluating EXPR could trap.
   EXPR is GENERIC, while tree_could_trap_p can be called
   only on GIMPLE.  */

bool
generic_expr_could_trap_p (tree expr)
{
  unsigned i, n;

  if (!expr || is_gimple_val (expr))
    return false;

  if (!EXPR_P (expr) || tree_could_trap_p (expr))
    return true;

  n = TREE_OPERAND_LENGTH (expr);
  for (i = 0; i < n; i++)
    if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
      return true;

  return false;
}

/* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
   into

   if (p)                       if (p)
     t1 = a;                      a;
   else            or           else
     t1 = b;                      b;
   t1;

   The second form is used when *EXPR_P is of type void.

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.  */

static enum gimplify_status
gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
{
  tree expr = *expr_p;
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);
  tree tmp, arm1, arm2;
  enum gimplify_status ret;
  tree label_true, label_false, label_cont;
  bool have_then_clause_p, have_else_clause_p;
  gcond *cond_stmt;
  enum tree_code pred_code;
  gimple_seq seq = NULL;

  /* If this COND_EXPR has a value, copy the values into a temporary within
     the arms.  */
  if (!VOID_TYPE_P (type))
    {
      tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
      tree result;

      /* If either an rvalue is ok or we do not require an lvalue, create the
         temporary.  But we cannot do that if the type is addressable.  */
      if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
          && !TREE_ADDRESSABLE (type))
        {
          if (gimplify_ctxp->allow_rhs_cond_expr
              /* If either branch has side effects or could trap, it can't be
                 evaluated unconditionally.  */
              && !TREE_SIDE_EFFECTS (then_)
              && !generic_expr_could_trap_p (then_)
              && !TREE_SIDE_EFFECTS (else_)
              && !generic_expr_could_trap_p (else_))
            return gimplify_pure_cond_expr (expr_p, pre_p);

          tmp = create_tmp_var (type, "iftmp");
          result = tmp;
        }

      /* Otherwise, only create and copy references to the values.  */
      else
        {
          type = build_pointer_type (type);

          if (!VOID_TYPE_P (TREE_TYPE (then_)))
            then_ = build_fold_addr_expr_loc (loc, then_);

          if (!VOID_TYPE_P (TREE_TYPE (else_)))
            else_ = build_fold_addr_expr_loc (loc, else_);

          expr
            = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);

          tmp = create_tmp_var (type, "iftmp");
          result = build_simple_mem_ref_loc (loc, tmp);
        }

      /* Build the new then clause, `tmp = then_;'.  But don't build the
         assignment if the value is void; in C++ it can be if it's a throw.  */
      if (!VOID_TYPE_P (TREE_TYPE (then_)))
        TREE_OPERAND (expr, 1) = build2 (INIT_EXPR, type, tmp, then_);

      /* Similarly, build the new else clause, `tmp = else_;'.  */
      if (!VOID_TYPE_P (TREE_TYPE (else_)))
        TREE_OPERAND (expr, 2) = build2 (INIT_EXPR, type, tmp, else_);

      TREE_TYPE (expr) = void_type_node;
      recalculate_side_effects (expr);

      /* Move the COND_EXPR to the prequeue.  */
      gimplify_stmt (&expr, pre_p);

      *expr_p = result;
      return GS_ALL_DONE;
    }

  /* Remove any COMPOUND_EXPR so the following cases will be caught.  */
  STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
    gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);

  /* Make sure the condition has BOOLEAN_TYPE.  */
  TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

  /* Break apart && and || conditions.  */
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
      || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
    {
      expr = shortcut_cond_expr (expr);

      if (expr != *expr_p)
        {
          *expr_p = expr;

          /* We can't rely on gimplify_expr to re-gimplify the expanded
             form properly, as cleanups might cause the target labels to be
             wrapped in a TRY_FINALLY_EXPR.  To prevent that, we need to
             set up a conditional context.  */
          gimple_push_condition ();
          gimplify_stmt (expr_p, &seq);
          gimple_pop_condition (pre_p);
          gimple_seq_add_seq (pre_p, seq);

          return GS_ALL_DONE;
        }
    }

  /* Now do the normal gimplification.  */

  /* Gimplify condition.  */
  ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
                       is_gimple_condexpr, fb_rvalue);
  if (ret == GS_ERROR)
    return GS_ERROR;
  gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);

  gimple_push_condition ();

  have_then_clause_p = have_else_clause_p = false;
  label_true = find_goto_label (TREE_OPERAND (expr, 1));
  if (label_true
      && DECL_CONTEXT (GOTO_DESTINATION (label_true)) == current_function_decl
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
         have different locations, otherwise we end up with incorrect
         location information on the branches.  */
      && (optimize
          || !EXPR_HAS_LOCATION (expr)
          || !rexpr_has_location (label_true)
          || EXPR_LOCATION (expr) == rexpr_location (label_true)))
    {
      have_then_clause_p = true;
      label_true = GOTO_DESTINATION (label_true);
    }
  else
    label_true = create_artificial_label (UNKNOWN_LOCATION);
  label_false = find_goto_label (TREE_OPERAND (expr, 2));
  if (label_false
      && DECL_CONTEXT (GOTO_DESTINATION (label_false)) == current_function_decl
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
         have different locations, otherwise we end up with incorrect
         location information on the branches.  */
      && (optimize
          || !EXPR_HAS_LOCATION (expr)
          || !rexpr_has_location (label_false)
          || EXPR_LOCATION (expr) == rexpr_location (label_false)))
    {
      have_else_clause_p = true;
      label_false = GOTO_DESTINATION (label_false);
    }
  else
    label_false = create_artificial_label (UNKNOWN_LOCATION);

  gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
                                 &arm2);
  cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
                                 label_false);
  gimple_set_no_warning (cond_stmt, TREE_NO_WARNING (COND_EXPR_COND (expr)));
  gimplify_seq_add_stmt (&seq, cond_stmt);
  gimple_stmt_iterator gsi = gsi_last (seq);
  maybe_fold_stmt (&gsi);

  label_cont = NULL_TREE;
  if (!have_then_clause_p)
    {
      /* For if (...) {} else { code; } put label_true after
         the else block.  */
      if (TREE_OPERAND (expr, 1) == NULL_TREE
          && !have_else_clause_p
          && TREE_OPERAND (expr, 2) != NULL_TREE)
        label_cont = label_true;
      else
        {
          gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
          have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
          /* For if (...) { code; } else {} or
             if (...) { code; } else goto label; or
             if (...) { code; return; } else { ... }
             label_cont isn't needed.  */
          if (!have_else_clause_p
              && TREE_OPERAND (expr, 2) != NULL_TREE
              && gimple_seq_may_fallthru (seq))
            {
              gimple *g;
              label_cont = create_artificial_label (UNKNOWN_LOCATION);

              g = gimple_build_goto (label_cont);

              /* GIMPLE_COND's are very low level; they have embedded
                 gotos.  This particular embedded goto should not be marked
                 with the location of the original COND_EXPR, as it would
                 correspond to the COND_EXPR's condition, not the ELSE or the
                 THEN arms.  To avoid marking it with the wrong location, flag
                 it as "no location".  */
              gimple_set_do_not_emit_location (g);

              gimplify_seq_add_stmt (&seq, g);
            }
        }
    }
  if (!have_else_clause_p)
    {
      gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
      have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
    }
  if (label_cont)
    gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));

  gimple_pop_condition (pre_p);
  gimple_seq_add_seq (pre_p, seq);

  if (ret == GS_ERROR)
    ; /* Do nothing.  */
  else if (have_then_clause_p || have_else_clause_p)
    ret = GS_ALL_DONE;
  else
    {
      /* Both arms are empty; replace the COND_EXPR with its predicate.  */
      expr = TREE_OPERAND (expr, 0);
      gimplify_stmt (&expr, pre_p);
    }

  *expr_p = NULL;
  return ret;
}
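
/* An illustrative example of the value case handled above: under the
   usual fallbacks,

     x = p ? f () : g ();

   is lowered into approximately

     if (p) goto T; else goto F;
     T: iftmp = f (); goto C;
     F: iftmp = g ();
     C: x = iftmp;

   where "iftmp" is the temporary created by gimplify_cond_expr and the
   labels are artificial.  */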

/* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
   to be marked addressable.

   We cannot rely on such an expression being directly markable if a temporary
   has been created by the gimplification.  In this case, we create another
   temporary and initialize it with a copy, which will become a store after we
   mark it addressable.  This can happen if the front-end passed us something
   that it could not mark addressable yet, like a Fortran pass-by-reference
   parameter (int) floatvar.  */

static void
prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
{
  while (handled_component_p (*expr_p))
    expr_p = &TREE_OPERAND (*expr_p, 0);
  if (is_gimple_reg (*expr_p))
    {
      /* Do not allow an SSA name as the temporary.  */
      tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL, false);
      DECL_GIMPLE_REG_P (var) = 0;
      *expr_p = var;
    }
}

/* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
   a call to __builtin_memcpy.  */

static enum gimplify_status
gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
                                gimple_seq *seq_p)
{
  tree t, to, to_ptr, from, from_ptr;
  gcall *gs;
  location_t loc = EXPR_LOCATION (*expr_p);

  to = TREE_OPERAND (*expr_p, 0);
  from = TREE_OPERAND (*expr_p, 1);

  /* Mark the RHS addressable.  Beware that it may not be possible to do so
     directly if a temporary has been created by the gimplification.  */
  prepare_gimple_addressable (&from, seq_p);

  mark_addressable (from);
  from_ptr = build_fold_addr_expr_loc (loc, from);
  gimplify_arg (&from_ptr, seq_p, loc);

  mark_addressable (to);
  to_ptr = build_fold_addr_expr_loc (loc, to);
  gimplify_arg (&to_ptr, seq_p, loc);

  t = builtin_decl_implicit (BUILT_IN_MEMCPY);

  gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);

  if (want_value)
    {
      /* tmp = memcpy() */
      t = create_tmp_var (TREE_TYPE (to_ptr));
      gimple_call_set_lhs (gs, t);
      gimplify_seq_add_stmt (seq_p, gs);

      *expr_p = build_simple_mem_ref (t);
      return GS_ALL_DONE;
    }

  gimplify_seq_add_stmt (seq_p, gs);
  *expr_p = NULL;
  return GS_ALL_DONE;
}

/* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
   a call to __builtin_memset.  In this case we know that the RHS is
   a CONSTRUCTOR with an empty element list.  */

static enum gimplify_status
gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
                                gimple_seq *seq_p)
{
  tree t, from, to, to_ptr;
  gcall *gs;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Assert our assumptions, to abort instead of producing wrong code
     silently if they are not met.  Beware that the RHS CONSTRUCTOR might
     not be immediately exposed.  */
  from = TREE_OPERAND (*expr_p, 1);
  if (TREE_CODE (from) == WITH_SIZE_EXPR)
    from = TREE_OPERAND (from, 0);

  gcc_assert (TREE_CODE (from) == CONSTRUCTOR
              && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));

  /* Now proceed.  */
  to = TREE_OPERAND (*expr_p, 0);

  to_ptr = build_fold_addr_expr_loc (loc, to);
  gimplify_arg (&to_ptr, seq_p, loc);
  t = builtin_decl_implicit (BUILT_IN_MEMSET);

  gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);

  if (want_value)
    {
      /* tmp = memset() */
      t = create_tmp_var (TREE_TYPE (to_ptr));
      gimple_call_set_lhs (gs, t);
      gimplify_seq_add_stmt (seq_p, gs);

      *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
      return GS_ALL_DONE;
    }

  gimplify_seq_add_stmt (seq_p, gs);
  *expr_p = NULL;
  return GS_ALL_DONE;
}
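
/* For illustration only: these helpers are used when the copied value
   carries a WITH_SIZE_EXPR, i.e. its size is known only at run time.
   A variable-sized copy such as

     void f (int n, char (*d)[n], char (*s)[n]) { *d = *s; }

   is lowered to roughly "__builtin_memcpy (d, s, n);", and clearing
   the same object from an empty CONSTRUCTOR takes the memset form.  */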

/* A subroutine of gimplify_init_ctor_preeval.  Called via walk_tree,
   determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
   assignment.  Return non-null if we detect a potential overlap.  */

struct gimplify_init_ctor_preeval_data
{
  /* The base decl of the lhs object.  May be NULL, in which case we
     have to assume the lhs is indirect.  */
  tree lhs_base_decl;

  /* The alias set of the lhs object.  */
  alias_set_type lhs_alias_set;
};

static tree
gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
{
  struct gimplify_init_ctor_preeval_data *data
    = (struct gimplify_init_ctor_preeval_data *) xdata;
  tree t = *tp;

  /* If we find the base object, obviously we have overlap.  */
  if (data->lhs_base_decl == t)
    return t;

  /* If the constructor component is indirect, determine if we have a
     potential overlap with the lhs.  The only bits of information we
     have to go on at this point are addressability and alias sets.  */
  if ((INDIRECT_REF_P (t)
       || TREE_CODE (t) == MEM_REF)
      && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
      && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
    return t;

  /* If the constructor component is a call, determine if it can hide a
     potential overlap with the lhs through an INDIRECT_REF like above.
     ??? Ugh - this is completely broken.  In fact this whole analysis
     doesn't look conservative.  */
  if (TREE_CODE (t) == CALL_EXPR)
    {
      tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));

      for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
        if (POINTER_TYPE_P (TREE_VALUE (type))
            && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
            && alias_sets_conflict_p (data->lhs_alias_set,
                                      get_alias_set
                                        (TREE_TYPE (TREE_VALUE (type)))))
          return t;
    }

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  return NULL;
}

/* A subroutine of gimplify_init_constructor.  Pre-evaluate EXPR,
   force values that overlap with the lhs (as described by *DATA)
   into temporaries.  */

static void
gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p,
                            gimple_seq *post_p,
                            struct gimplify_init_ctor_preeval_data *data)
{
  enum gimplify_status one;

  /* If the value is constant, then there's nothing to pre-evaluate.  */
  if (TREE_CONSTANT (*expr_p))
    {
      /* Ensure it does not have side effects, it might contain a reference
         to the object we're initializing.  */
      gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
      return;
    }

  /* If the type has non-trivial constructors, we can't pre-evaluate.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
    return;

  /* Recurse for nested constructors.  */
  if (TREE_CODE (*expr_p) == CONSTRUCTOR)
    {
      unsigned HOST_WIDE_INT ix;
      constructor_elt *ce;
      vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);

      FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
        gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);

      return;
    }

  /* If this is a variable sized type, we must remember the size.  */
  maybe_with_size_expr (expr_p);

  /* Gimplify the constructor element to something appropriate for the rhs
     of a MODIFY_EXPR.  Given that we know the LHS is an aggregate, we know
     the gimplifier will consider this a store to memory.  Doing this
     gimplification now means that we won't have to deal with complicated
     language-specific trees, nor trees like SAVE_EXPR that can induce
     exponential search behavior.  */
  one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
  if (one == GS_ERROR)
    {
      *expr_p = NULL;
      return;
    }

  /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
     with the lhs, since "a = { .x=a }" doesn't make sense.  This will
     always be true for all scalars, since is_gimple_mem_rhs insists on a
     temporary variable for them.  */
  if (DECL_P (*expr_p))
    return;

  /* If this is of variable size, we have no choice but to assume it doesn't
     overlap since we can't make a temporary for it.  */
  if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
    return;

  /* Otherwise, we must search for overlap ...  */
  if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
    return;

  /* ... and if found, force the value into a temporary.  */
  *expr_p = get_formal_tmp_var (*expr_p, pre_p);
}
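
/* An illustrative example: in

     struct A a = { 1, 2 };
     a = (struct A) { 5, a.x };

   the element "a.x" reads from the object being stored to, so the
   pre-evaluation above ensures it lands in a temporary first, roughly

     D.1 = a.x;
     a = { 5, D.1 };

   where D.1 stands for an artificial temporary.  */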

/* A subroutine of gimplify_init_ctor_eval.  Create a loop for
   a RANGE_EXPR in a CONSTRUCTOR for an array.

   var = lower;
   loop_entry:
   object[var] = value;
   if (var == upper)
     goto loop_exit;
   var = var + 1;
   goto loop_entry;
   loop_exit:

   We increment var _after_ the loop exit check because we might otherwise
   fail if upper == TYPE_MAX_VALUE (type for upper).

   Note that we never have to deal with SAVE_EXPRs here, because this has
   already been taken care of for us, in gimplify_init_ctor_preeval().  */

static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
                                     gimple_seq *, bool);

static void
gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
                               tree value, tree array_elt_type,
                               gimple_seq *pre_p, bool cleared)
{
  tree loop_entry_label, loop_exit_label, fall_thru_label;
  tree var, var_type, cref, tmp;

  loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
  loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
  fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);

  /* Create and initialize the index variable.  */
  var_type = TREE_TYPE (upper);
  var = create_tmp_var (var_type);
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));

  /* Add the loop entry label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));

  /* Build the reference.  */
  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
                 var, NULL_TREE, NULL_TREE);

  /* If we are a constructor, just call gimplify_init_ctor_eval to do
     the store.  Otherwise just assign value to the reference.  */

  if (TREE_CODE (value) == CONSTRUCTOR)
    /* NB we might have to call ourself recursively through
       gimplify_init_ctor_eval if the value is a constructor.  */
    gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
                             pre_p, cleared);
  else
    gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));

  /* We exit the loop when the index var is equal to the upper bound.  */
  gimplify_seq_add_stmt (pre_p,
                         gimple_build_cond (EQ_EXPR, var, upper,
                                            loop_exit_label, fall_thru_label));

  gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));

  /* Otherwise, increment the index var...  */
  tmp = build2 (PLUS_EXPR, var_type, var,
                fold_convert (var_type, integer_one_node));
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));

  /* ...and jump back to the loop entry.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));

  /* Add the loop exit label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
}
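
/* For illustration only: a GNU range designator such as

     int a[100] = { [10 ... 89] = 7 };

   reaches this point as a RANGE_EXPR index and is expanded into the
   loop shown in the comment above, instead of 80 separate stores.  */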

/* Return true if FDECL is a FIELD_DECL with zero size.  */

static bool
zero_sized_field_decl (const_tree fdecl)
{
  if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
      && integer_zerop (DECL_SIZE (fdecl)))
    return true;
  return false;
}

/* Return true if TYPE is zero sized.  */

static bool
zero_sized_type (const_tree type)
{
  if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
      && integer_zerop (TYPE_SIZE (type)))
    return true;
  return false;
}

/* A subroutine of gimplify_init_constructor.  Generate individual
   MODIFY_EXPRs for a CONSTRUCTOR.  OBJECT is the LHS against which the
   assignments should happen.  ELTS is the CONSTRUCTOR_ELTS of the
   CONSTRUCTOR.  CLEARED is true if the entire LHS object has been
   zeroed first.  */

static void
gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
                         gimple_seq *pre_p, bool cleared)
{
  tree array_elt_type = NULL;
  unsigned HOST_WIDE_INT ix;
  tree purpose, value;

  if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
    array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));

  FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
    {
      tree cref;

      /* NULL values are created above for gimplification errors.  */
      if (value == NULL)
        continue;

      if (cleared && initializer_zerop (value))
        continue;

      /* ??? Here's to hoping the front end fills in all of the indices,
         so we don't have to figure out what's missing ourselves.  */
      gcc_assert (purpose);

      /* Skip zero-sized fields, unless value has side-effects.  This can
         happen with calls to functions returning a zero-sized type, which
         we shouldn't discard.  As a number of downstream passes don't
         expect sets of zero-sized fields, we rely on the gimplification of
         the MODIFY_EXPR we make below to drop the assignment statement.  */
      if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
        continue;

      /* If we have a RANGE_EXPR, we have to build a loop to assign the
         whole range.  */
      if (TREE_CODE (purpose) == RANGE_EXPR)
        {
          tree lower = TREE_OPERAND (purpose, 0);
          tree upper = TREE_OPERAND (purpose, 1);

          /* If the lower bound is equal to upper, just treat it as if
             upper was the index.  */
          if (simple_cst_equal (lower, upper))
            purpose = upper;
          else
            {
              gimplify_init_ctor_eval_range (object, lower, upper, value,
                                             array_elt_type, pre_p, cleared);
              continue;
            }
        }

      if (array_elt_type)
        {
          /* Do not use bitsizetype for ARRAY_REF indices.  */
          if (TYPE_DOMAIN (TREE_TYPE (object)))
            purpose
              = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
                              purpose);
          cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
                         purpose, NULL_TREE, NULL_TREE);
        }
      else
        {
          gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
          cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
                         unshare_expr (object), purpose, NULL_TREE);
        }

      if (TREE_CODE (value) == CONSTRUCTOR
          && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
        gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
                                 pre_p, cleared);
      else
        {
          tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
          gimplify_and_add (init, pre_p);
          ggc_free (init);
        }
    }
}
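
/* An illustrative example: for

     struct P { int x, y; } q = { .x = i, .y = j };

   the loop above emits the element-wise stores

     q.x = i;
     q.y = j;

   (roughly), one INIT_EXPR per constructor element.  */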

/* Return the appropriate RHS predicate for this LHS.  */

gimple_predicate
rhs_predicate_for (tree lhs)
{
  if (is_gimple_reg (lhs))
    return is_gimple_reg_rhs_or_call;
  else
    return is_gimple_mem_rhs_or_call;
}

/* Return the initial guess for an appropriate RHS predicate for this LHS,
   before the LHS has been gimplified.  */

static gimple_predicate
initial_rhs_predicate_for (tree lhs)
{
  if (is_gimple_reg_type (TREE_TYPE (lhs)))
    return is_gimple_reg_rhs_or_call;
  else
    return is_gimple_mem_rhs_or_call;
}

/* Gimplify a C99 compound literal expression.  This just means adding
   the DECL_EXPR before the current statement and using its anonymous
   decl instead.  */

static enum gimplify_status
gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
                                bool (*gimple_test_f) (tree),
                                fallback_t fallback)
{
  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
  tree decl = DECL_EXPR_DECL (decl_s);
  tree init = DECL_INITIAL (decl);
  /* Mark the decl as addressable if the compound literal
     expression is addressable now, otherwise it is marked too late
     after we gimplify the initialization expression.  */
  if (TREE_ADDRESSABLE (*expr_p))
    TREE_ADDRESSABLE (decl) = 1;
  /* Otherwise, if we don't need an lvalue and have a literal, directly
     substitute it.  Check if it matches the gimple predicate, as
     otherwise we'd generate a new temporary, and we can as well just
     use the decl we already have.  */
  else if (!TREE_ADDRESSABLE (decl)
           && !TREE_THIS_VOLATILE (decl)
           && init
           && (fallback & fb_lvalue) == 0
           && gimple_test_f (init))
    {
      *expr_p = init;
      return GS_OK;
    }

  /* Preliminarily mark non-addressed complex variables as eligible
     for promotion to gimple registers.  We'll transform their uses
     as we find them.  */
  if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
      && !TREE_THIS_VOLATILE (decl)
      && !needs_to_live_in_memory (decl))
    DECL_GIMPLE_REG_P (decl) = 1;

  /* If the decl is not addressable, then it is being used in some
     expression or on the right hand side of a statement, and it can
     be put into a readonly data section.  */
  if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
    TREE_READONLY (decl) = 1;

  /* This decl isn't mentioned in the enclosing block, so add it to the
     list of temps.  FIXME it seems a bit of a kludge to say that
     anonymous artificial vars aren't pushed, but everything else is.  */
  if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
    gimple_add_tmp_var (decl);

  gimplify_and_add (decl_s, pre_p);
  *expr_p = decl;
  return GS_OK;
}
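
/* An illustrative example: in

     int *p = (int []) { 1, 2, 3 };

   the compound literal is replaced by its anonymous decl, giving
   roughly

     int D.1[3] = { 1, 2, 3 };
     int *p = &D.1[0];

   where D.1 stands for the literal's anonymous variable.  */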

/* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
   return a new CONSTRUCTOR if something changed.  */

static tree
optimize_compound_literals_in_ctor (tree orig_ctor)
{
  tree ctor = orig_ctor;
  vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
  unsigned int idx, num = vec_safe_length (elts);

  for (idx = 0; idx < num; idx++)
    {
      tree value = (*elts)[idx].value;
      tree newval = value;
      if (TREE_CODE (value) == CONSTRUCTOR)
        newval = optimize_compound_literals_in_ctor (value);
      else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
        {
          tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
          tree decl = DECL_EXPR_DECL (decl_s);
          tree init = DECL_INITIAL (decl);

          if (!TREE_ADDRESSABLE (value)
              && !TREE_ADDRESSABLE (decl)
              && init
              && TREE_CODE (init) == CONSTRUCTOR)
            newval = optimize_compound_literals_in_ctor (init);
        }
      if (newval == value)
        continue;

      if (ctor == orig_ctor)
        {
          ctor = copy_node (orig_ctor);
          CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
          elts = CONSTRUCTOR_ELTS (ctor);
        }
      (*elts)[idx].value = newval;
    }
  return ctor;
}
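
/* For illustration only: an initializer such as

     struct T t = { .s = (struct S) { 1, 2 } };

   is flattened into "struct T t = { .s = { 1, 2 } };" when the address
   of the embedded literal is never taken, so the whole constructor can
   be treated as a single initializer.  */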

/* A subroutine of gimplify_modify_expr.  Break out elements of a
   CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.

   Note that we still need to clear any elements that don't have explicit
   initializers, so if not all elements are initialized we keep the
   original MODIFY_EXPR, we just remove all of the constructor elements.

   If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
   GS_ERROR if we would have to create a temporary when gimplifying
   this constructor.  Otherwise, return GS_OK.

   If NOTIFY_TEMP_CREATION is false, just do the gimplification.  */

static enum gimplify_status
gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
                           bool want_value, bool notify_temp_creation)
{
  tree object, ctor, type;
  enum gimplify_status ret;
  vec<constructor_elt, va_gc> *elts;

  gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);

  if (!notify_temp_creation)
    {
      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
                           is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
        return ret;
    }

  object = TREE_OPERAND (*expr_p, 0);
  ctor = TREE_OPERAND (*expr_p, 1)
    = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
  type = TREE_TYPE (ctor);
  elts = CONSTRUCTOR_ELTS (ctor);
  ret = GS_ALL_DONE;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case ARRAY_TYPE:
      {
        struct gimplify_init_ctor_preeval_data preeval_data;
        HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
        HOST_WIDE_INT num_unique_nonzero_elements;
        bool cleared, complete_p, valid_const_initializer;
        /* Use readonly data for initializers of this or smaller size
           regardless of the num_nonzero_elements /
           num_unique_nonzero_elements ratio.  */
        const HOST_WIDE_INT min_unique_size = 64;
        /* If num_nonzero_elements / num_unique_nonzero_elements ratio
           is smaller than this, use readonly data.  */
        const int unique_nonzero_ratio = 8;

        /* Aggregate types must lower constructors to initialization of
           individual elements.  The exception is that a CONSTRUCTOR node
           with no elements indicates zero-initialization of the whole.  */
        if (vec_safe_is_empty (elts))
          {
            if (notify_temp_creation)
              return GS_OK;
            break;
          }

        /* Fetch information about the constructor to direct later
           processing.  We might want to make static versions of it in
           various cases, and can only do so if it is known to be a valid
           constant initializer.  */
        valid_const_initializer
          = categorize_ctor_elements (ctor, &num_nonzero_elements,
                                      &num_unique_nonzero_elements,
                                      &num_ctor_elements, &complete_p);

        /* If a const aggregate variable is being initialized, then it
           should never be a loss to promote the variable to be static.  */
        if (valid_const_initializer
            && num_nonzero_elements > 1
            && TREE_READONLY (object)
            && VAR_P (object)
            && !DECL_REGISTER (object)
            && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object))
            /* For ctors that have many repeated nonzero elements
               represented through RANGE_EXPRs, prefer initializing
               those through runtime loops over copies of large amounts
               of data from readonly data section.  */
            && (num_unique_nonzero_elements
                > num_nonzero_elements / unique_nonzero_ratio
                || ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
                    <= (unsigned HOST_WIDE_INT) min_unique_size)))
          {
            if (notify_temp_creation)
              return GS_ERROR;
            DECL_INITIAL (object) = ctor;
            TREE_STATIC (object) = 1;
            if (!DECL_NAME (object))
              DECL_NAME (object) = create_tmp_var_name ("C");
            walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);

            /* ??? C++ doesn't automatically append a .<number> to the
               assembler name, and even when it does, it looks at FE private
               data structures to figure out what that number should be,
               which are not set for this variable.  I suppose this is
               important for local statics for inline functions, which aren't
               "local" in the object file sense.  So in order to get a unique
               TU-local symbol, we must invoke the lhd version now.  */
            lhd_set_decl_assembler_name (object);

            *expr_p = NULL_TREE;
            break;
          }

        /* If there are "lots" of initialized elements, even discounting
           those that are not address constants (and thus *must* be
           computed at runtime), then partition the constructor into
           constant and non-constant parts.  Block copy the constant
           parts in, then generate code for the non-constant parts.  */
        /* TODO.  There's code in cp/typeck.c to do this.  */

        if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
          /* store_constructor will ignore the clearing of variable-sized
             objects.  Initializers for such objects must explicitly set
             every field that needs to be set.  */
          cleared = false;
        else if (!complete_p)
          /* If the constructor isn't complete, clear the whole object
             beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.

             ??? This ought not to be needed.  For any element not present
             in the initializer, we should simply set them to zero.  Except
             we'd need to *find* the elements that are not present, and that
             requires trickery to avoid quadratic compile-time behavior in
             large cases or excessive memory use in small cases.  */
          cleared = !CONSTRUCTOR_NO_CLEARING (ctor);
        else if (num_ctor_elements - num_nonzero_elements
                 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
                 && num_nonzero_elements < num_ctor_elements / 4)
          /* If there are "lots" of zeros, it's more efficient to clear
             the memory and then set the nonzero elements.  */
          cleared = true;
        else
          cleared = false;

        /* If there are "lots" of initialized elements, and all of them
           are valid address constants, then the entire initializer can
           be dropped to memory, and then memcpy'd out.  Don't do this
           for sparse arrays, though, as it's more efficient to follow
           the standard CONSTRUCTOR behavior of memset followed by
           individual element initialization.  Also don't do this for small
           all-zero initializers (which aren't big enough to merit
           clearing), and don't try to make bitwise copies of
           TREE_ADDRESSABLE types.  */

        if (valid_const_initializer
            && !(cleared || num_nonzero_elements == 0)
            && !TREE_ADDRESSABLE (type))
          {
            HOST_WIDE_INT size = int_size_in_bytes (type);
            unsigned int align;

            /* ??? We can still get unbounded array types, at least
               from the C++ front end.  This seems wrong, but attempt
               to work around it for now.  */
            if (size < 0)
              {
                size = int_size_in_bytes (TREE_TYPE (object));
                if (size >= 0)
                  TREE_TYPE (ctor) = type = TREE_TYPE (object);
              }

            /* Find the maximum alignment we can assume for the object.  */
            /* ??? Make use of DECL_OFFSET_ALIGN.  */
            if (DECL_P (object))
              align = DECL_ALIGN (object);
            else
              align = TYPE_ALIGN (type);

            /* Do a block move either if the size is so small as to make
               each individual move a sub-unit move on average, or if it
               is so large as to make individual moves inefficient.  */
            if (size > 0
                && num_nonzero_elements > 1
                /* For ctors that have many repeated nonzero elements
                   represented through RANGE_EXPRs, prefer initializing
                   those through runtime loops over copies of large amounts
                   of data from readonly data section.  */
                && (num_unique_nonzero_elements
                    > num_nonzero_elements / unique_nonzero_ratio
                    || size <= min_unique_size)
                && (size < num_nonzero_elements
                    || !can_move_by_pieces (size, align)))
              {
                if (notify_temp_creation)
                  return GS_ERROR;

                walk_tree (&ctor, force_labels_r, NULL, NULL);
                ctor = tree_output_constant_def (ctor);
                if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
                  ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
                TREE_OPERAND (*expr_p, 1) = ctor;

                /* This is no longer an assignment of a CONSTRUCTOR, but
                   we still may have processing to do on the LHS.  So
                   pretend we didn't do anything here to let that happen.  */
                return GS_UNHANDLED;
              }
          }

        /* If the target is volatile, we have non-zero elements and more than
           one field to assign, initialize the target from a temporary.  */
        if (TREE_THIS_VOLATILE (object)
            && !TREE_ADDRESSABLE (type)
            && (num_nonzero_elements > 0 || !cleared)
            && vec_safe_length (elts) > 1)
          {
            tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
            TREE_OPERAND (*expr_p, 0) = temp;
            *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
                              *expr_p,
                              build2 (MODIFY_EXPR, void_type_node,
                                      object, temp));
            return GS_OK;
          }

        if (notify_temp_creation)
          return GS_OK;

        /* If there are nonzero elements and if needed, pre-evaluate to
           capture elements overlapping with the lhs into temporaries.  We
           must do this before clearing to fetch the values before they are
           zeroed-out.  */
        if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
          {
            preeval_data.lhs_base_decl = get_base_address (object);
            if (!DECL_P (preeval_data.lhs_base_decl))
              preeval_data.lhs_base_decl = NULL;
            preeval_data.lhs_alias_set = get_alias_set (object);

            gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
                                        pre_p, post_p, &preeval_data);
          }

        bool ctor_has_side_effects_p
          = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));

        if (cleared)
          {
            /* Zap the CONSTRUCTOR element list, which simplifies this case.
               Note that we still have to gimplify, in order to handle the
               case of variable sized types.  Avoid shared tree structures.  */
            CONSTRUCTOR_ELTS (ctor) = NULL;
            TREE_SIDE_EFFECTS (ctor) = 0;
            object = unshare_expr (object);
            gimplify_stmt (expr_p, pre_p);
          }

        /* If we have not block cleared the object, or if there are nonzero
           elements in the constructor, or if the constructor has side
           effects, add assignments to the individual scalar fields of the
           object.  */
        if (!cleared
            || num_nonzero_elements > 0
            || ctor_has_side_effects_p)
          gimplify_init_ctor_eval (object, elts, pre_p, cleared);

        *expr_p = NULL_TREE;
      }
      break;

    case COMPLEX_TYPE:
      {
        tree r, i;

        if (notify_temp_creation)
          return GS_OK;

        /* Extract the real and imaginary parts out of the ctor.  */
        gcc_assert (elts->length () == 2);
        r = (*elts)[0].value;
        i = (*elts)[1].value;
        if (r == NULL || i == NULL)
          {
            tree zero = build_zero_cst (TREE_TYPE (type));
            if (r == NULL)
              r = zero;
            if (i == NULL)
              i = zero;
          }

        /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
           represent creation of a complex value.  */
        if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
          {
            ctor = build_complex (type, r, i);
            TREE_OPERAND (*expr_p, 1) = ctor;
          }
        else
          {
            ctor = build2 (COMPLEX_EXPR, type, r, i);
            TREE_OPERAND (*expr_p, 1) = ctor;
            ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
                                 pre_p,
                                 post_p,
                                 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
                                 fb_rvalue);
          }
      }
      break;

    case VECTOR_TYPE:
      {
        unsigned HOST_WIDE_INT ix;
        constructor_elt *ce;

        if (notify_temp_creation)
          return GS_OK;

        /* Go ahead and simplify constant constructors to VECTOR_CST.  */
        if (TREE_CONSTANT (ctor))
          {
            bool constant_p = true;
            tree value;

            /* Even when ctor is constant, it might contain non-*_CST
               elements, such as addresses or trapping values like
               1.0/0.0 - 1.0/0.0.  Such expressions don't belong
               in VECTOR_CST nodes.  */
            FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
              if (!CONSTANT_CLASS_P (value))
                {
                  constant_p = false;
                  break;
                }

            if (constant_p)
              {
                TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
                break;
              }

            TREE_CONSTANT (ctor) = 0;
          }

        /* Vector types use CONSTRUCTOR all the way through gimple
           compilation as a general initializer.  */
        FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
          {
            enum gimplify_status tret;
            tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
                                  fb_rvalue);
            if (tret == GS_ERROR)
              ret = GS_ERROR;
            else if (TREE_STATIC (ctor)
                     && !initializer_constant_valid_p (ce->value,
                                                       TREE_TYPE (ce->value)))
              TREE_STATIC (ctor) = 0;
          }
        if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
          TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
      }
      break;

    default:
      /* So how did we get a CONSTRUCTOR for a scalar type?  */
      gcc_unreachable ();
    }

  if (ret == GS_ERROR)
    return GS_ERROR;
  /* If we have gimplified both sides of the initializer but have
     not emitted an assignment, do so now.  */
  if (*expr_p)
    {
      tree lhs = TREE_OPERAND (*expr_p, 0);
      tree rhs = TREE_OPERAND (*expr_p, 1);
      if (want_value && object == lhs)
        lhs = unshare_expr (lhs);
      gassign *init = gimple_build_assign (lhs, rhs);
      gimplify_seq_add_stmt (pre_p, init);
    }
  if (want_value)
    {
      *expr_p = object;
      return GS_OK;
    }
  else
    {
      *expr_p = NULL;
      return GS_ALL_DONE;
    }
}
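
/* For illustration only: roughly, a function-local

     const int tbl[4] = { 1, 2, 3, 4 };

   is promoted by the code above to a static variable initialized from
   readonly data, while a mostly-zero initializer such as

     int buf[64] = { [0] = 7 };

   is cleared as a block first and then has its few nonzero elements
   stored individually.  */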

/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  This may only be applied to a rhs of an expression.
   Note that the resulting type may be different from the type pointed
   to in the sense that it is still compatible from the langhooks
   point of view.  */

static tree
gimple_fold_indirect_ref_rhs (tree t)
{
  return gimple_fold_indirect_ref (t);
}

/* Subroutine of gimplify_modify_expr to do simplifications of
   MODIFY_EXPRs based on the code of the RHS.  We loop for as long as
   something changes.  */

static enum gimplify_status
gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
                          gimple_seq *pre_p, gimple_seq *post_p,
                          bool want_value)
{
  enum gimplify_status ret = GS_UNHANDLED;
  bool changed;

  do
    {
      changed = false;
      switch (TREE_CODE (*from_p))
        {
        case VAR_DECL:
          /* If we're assigning from a read-only variable initialized with
             a constructor, do the direct assignment from the constructor,
             but only if neither source nor target are volatile since this
             latter assignment might end up being done on a per-field
             basis.  */
          if (DECL_INITIAL (*from_p)
              && TREE_READONLY (*from_p)
              && !TREE_THIS_VOLATILE (*from_p)
              && !TREE_THIS_VOLATILE (*to_p)
              && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
            {
              tree old_from = *from_p;
              enum gimplify_status subret;

              /* Move the constructor into the RHS.  */
              *from_p = unshare_expr (DECL_INITIAL (*from_p));

              /* Let's see if gimplify_init_constructor will need to put
                 it in memory.  */
              subret = gimplify_init_constructor (expr_p, NULL, NULL,
                                                  false, true);
              if (subret == GS_ERROR)
                {
                  /* If so, revert the change.  */
                  *from_p = old_from;
                }
              else
                {
                  ret = GS_OK;
                  changed = true;
                }
            }
          break;
        case INDIRECT_REF:
          {
            /* If we have code like

                 *(const A*)(A*)&x

               where the type of "x" is a (possibly cv-qualified variant
               of "A"), treat the entire expression as identical to "x".
               This kind of code arises in C++ when an object is bound
               to a const reference, and if "x" is a TARGET_EXPR we want
               to take advantage of the optimization below.  */
            bool volatile_p = TREE_THIS_VOLATILE (*from_p);
            tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
            if (t)
              {
                if (TREE_THIS_VOLATILE (t) != volatile_p)
                  {
                    if (DECL_P (t))
                      t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
                                                    build_fold_addr_expr (t));
                    if (REFERENCE_CLASS_P (t))
                      TREE_THIS_VOLATILE (t) = volatile_p;
                  }
                *from_p = t;
                ret = GS_OK;
                changed = true;
              }
            break;
          }

        case TARGET_EXPR:
          {
            /* If we are initializing something from a TARGET_EXPR, strip the
               TARGET_EXPR and initialize it directly, if possible.  This
               can't be done if the initializer is void, since that implies
               that the temporary is set in some non-trivial way.

               ??? What about code that pulls out the temp and uses it
               elsewhere?  I think that such code never uses the TARGET_EXPR
               as an initializer.  If I'm wrong, we'll die because the temp
               won't have any RTL.  In that case, I guess we'll need to
               replace references somehow.  */
            tree init = TARGET_EXPR_INITIAL (*from_p);

            if (init
                && (TREE_CODE (*expr_p) != MODIFY_EXPR
                    || !TARGET_EXPR_NO_ELIDE (*from_p))
                && !VOID_TYPE_P (TREE_TYPE (init)))
              {
                *from_p = init;
                ret = GS_OK;
                changed = true;
              }
          }
          break;

        case COMPOUND_EXPR:
          /* Remove any COMPOUND_EXPR in the RHS so the following cases will
             be caught.  */
          gimplify_compound_expr (from_p, pre_p, true);
          ret = GS_OK;
          changed = true;
          break;

        case CONSTRUCTOR:
          /* If we already made some changes, let the front end have a
             crack at this before we break it down.  */
          if (ret != GS_UNHANDLED)
            break;
          /* If we're initializing from a CONSTRUCTOR, break this into
             individual MODIFY_EXPRs.  */
          return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
                                            false);

        case COND_EXPR:
          /* If we're assigning to a non-register type, push the assignment
             down into the branches.  This is mandatory for ADDRESSABLE types,
             since we cannot generate temporaries for such, but it saves a
             copy in other cases as well.  */
          if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
            {
              /* This code should mirror the code in gimplify_cond_expr.  */
              enum tree_code code = TREE_CODE (*expr_p);
              tree cond = *from_p;
              tree result = *to_p;

              ret = gimplify_expr (&result, pre_p, post_p,
                                   is_gimple_lvalue, fb_lvalue);
              if (ret != GS_ERROR)
                ret = GS_OK;

              /* If we are going to write RESULT more than once, clear
                 TREE_READONLY flag, otherwise we might incorrectly promote
                 the variable to static const and initialize it at compile
                 time in one of the branches.  */
              if (VAR_P (result)
                  && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
                  && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
                TREE_READONLY (result) = 0;
              if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
                TREE_OPERAND (cond, 1)
                  = build2 (code, void_type_node, result,
                            TREE_OPERAND (cond, 1));
              if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
                TREE_OPERAND (cond, 2)
                  = build2 (code, void_type_node, unshare_expr (result),
                            TREE_OPERAND (cond, 2));

              TREE_TYPE (cond) = void_type_node;
              recalculate_side_effects (cond);

              if (want_value)
                {
                  gimplify_and_add (cond, pre_p);
                  *expr_p = unshare_expr (result);
                }
              else
                *expr_p = cond;
              return ret;
            }
          break;
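
          /* An illustrative example for the COND_EXPR case above: with an
             aggregate LHS,

               s = p ? s1 : s2;

             is rewritten into roughly

               if (p) s = s1; else s = s2;

             so that no aggregate temporary is needed for the arms.  */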

        case CALL_EXPR:
          /* For calls that return in memory, give *to_p as the CALL_EXPR's
             return slot so that we don't generate a temporary.  */
          if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
              && aggregate_value_p (*from_p, *from_p))
            {
              bool use_target;

              if (!(rhs_predicate_for (*to_p)) (*from_p))
                /* If we need a temporary, *to_p isn't accurate.  */
                use_target = false;
              /* It's OK to use the return slot directly unless it's an
                 NRV.  */
              else if (TREE_CODE (*to_p) == RESULT_DECL
                       && DECL_NAME (*to_p) == NULL_TREE
                       && needs_to_live_in_memory (*to_p))
                use_target = true;
              else if (is_gimple_reg_type (TREE_TYPE (*to_p))
                       || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
                /* Don't force regs into memory.  */
                use_target = false;
              else if (TREE_CODE (*expr_p) == INIT_EXPR)
                /* It's OK to use the target directly if it's being
                   initialized.  */
                use_target = true;
              else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
                       != INTEGER_CST)
                /* Always use the target and thus RSO for variable-sized types.
                   GIMPLE cannot deal with a variable-sized assignment
                   embedded in a call statement.  */
                use_target = true;
              else if (TREE_CODE (*to_p) != SSA_NAME
                       && (!is_gimple_variable (*to_p)
                           || needs_to_live_in_memory (*to_p)))
                /* Don't use the original target if it's already addressable;
                   if its address escapes, and the called function uses the
                   NRV optimization, a conforming program could see *to_p
                   change before the called function returns; see c++/19317.
                   When optimizing, the return_slot pass marks more functions
                   as safe after we have escape info.  */
                use_target = false;
              else
                use_target = true;

              if (use_target)
                {
                  CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
                  mark_addressable (*to_p);
                }
            }
          break;

        case WITH_SIZE_EXPR:
          /* Likewise for calls that return an aggregate of non-constant size,
             since we would not be able to generate a temporary at all.  */
          if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
            {
              *from_p = TREE_OPERAND (*from_p, 0);
              /* We don't change ret in this case because the
                 WITH_SIZE_EXPR might have been added in
                 gimplify_modify_expr, so returning GS_OK would lead to an
                 infinite loop.  */
              changed = true;
            }
          break;

          /* If we're initializing from a container, push the initialization
             inside it.  */
        case CLEANUP_POINT_EXPR:
        case BIND_EXPR:
        case STATEMENT_LIST:
          {
            tree wrap = *from_p;
            tree t;

            ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
                                 fb_lvalue);
            if (ret != GS_ERROR)
              ret = GS_OK;

            t = voidify_wrapper_expr (wrap, *expr_p);
            gcc_assert (t == *expr_p);

            if (want_value)
              {
                gimplify_and_add (wrap, pre_p);
                *expr_p = unshare_expr (*to_p);
              }
            else
              *expr_p = wrap;
            return GS_OK;
          }

        case COMPOUND_LITERAL_EXPR:
          {
            tree complit = TREE_OPERAND (*expr_p, 1);
            tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
            tree decl = DECL_EXPR_DECL (decl_s);
            tree init = DECL_INITIAL (decl);

            /* struct T x = (struct T) { 0, 1, 2 } can be optimized
               into struct T x = { 0, 1, 2 } if the address of the
               compound literal has never been taken.  */
            if (!TREE_ADDRESSABLE (complit)
                && !TREE_ADDRESSABLE (decl)
                && init)
              {
                *expr_p = copy_node (*expr_p);
                TREE_OPERAND (*expr_p, 1) = init;
                return GS_OK;
              }
          }

        default:
          break;
        }
    }
  while (changed);

  return ret;
}
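
/* An illustrative example for the COMPOUND_LITERAL_EXPR case above:

     struct T x = (struct T) { 0, 1, 2 };

   becomes "struct T x = { 0, 1, 2 };" when the literal's address is
   never taken, leaving a plain CONSTRUCTOR initializer to be handled
   when the assignment is processed again.  */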
Then the fully-fledged complex operations lowering pass 5534 followed by a DCE pass are necessary in order to fix things up. */ 5535 5536 static enum gimplify_status 5537 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p, 5538 bool want_value) 5539 { 5540 enum tree_code code, ocode; 5541 tree lhs, rhs, new_rhs, other, realpart, imagpart; 5542 5543 lhs = TREE_OPERAND (*expr_p, 0); 5544 rhs = TREE_OPERAND (*expr_p, 1); 5545 code = TREE_CODE (lhs); 5546 lhs = TREE_OPERAND (lhs, 0); 5547 5548 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR; 5549 other = build1 (ocode, TREE_TYPE (rhs), lhs); 5550 TREE_NO_WARNING (other) = 1; 5551 other = get_formal_tmp_var (other, pre_p); 5552 5553 realpart = code == REALPART_EXPR ? rhs : other; 5554 imagpart = code == REALPART_EXPR ? other : rhs; 5555 5556 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart)) 5557 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart); 5558 else 5559 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart); 5560 5561 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs)); 5562 *expr_p = (want_value) ? rhs : NULL_TREE; 5563 5564 return GS_ALL_DONE; 5565 } 5566 5567 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P. 5568 5569 modify_expr 5570 : varname '=' rhs 5571 | '*' ID '=' rhs 5572 5573 PRE_P points to the list where side effects that must happen before 5574 *EXPR_P should be stored. 5575 5576 POST_P points to the list where side effects that must happen after 5577 *EXPR_P should be stored. 5578 5579 WANT_VALUE is nonzero iff we want to use the value of this expression 5580 in another expression. */ 5581 5582 static enum gimplify_status 5583 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p, 5584 bool want_value) 5585 { 5586 tree *from_p = &TREE_OPERAND (*expr_p, 1); 5587 tree *to_p = &TREE_OPERAND (*expr_p, 0); 5588 enum gimplify_status ret = GS_UNHANDLED; 5589 gimple *assign; 5590 location_t loc = EXPR_LOCATION (*expr_p); 5591 gimple_stmt_iterator gsi; 5592 5593 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR 5594 || TREE_CODE (*expr_p) == INIT_EXPR); 5595 5596 /* Trying to simplify a clobber using normal logic doesn't work, 5597 so handle it here. */ 5598 if (TREE_CLOBBER_P (*from_p)) 5599 { 5600 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue); 5601 if (ret == GS_ERROR) 5602 return ret; 5603 gcc_assert (!want_value); 5604 if (!VAR_P (*to_p) && TREE_CODE (*to_p) != MEM_REF) 5605 { 5606 tree addr = get_initialized_tmp_var (build_fold_addr_expr (*to_p), 5607 pre_p, post_p); 5608 *to_p = build_simple_mem_ref_loc (EXPR_LOCATION (*to_p), addr); 5609 } 5610 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p)); 5611 *expr_p = NULL; 5612 return GS_ALL_DONE; 5613 } 5614 5615 /* Insert pointer conversions required by the middle-end that are not 5616 required by the frontend. This fixes middle-end type checking 5617 for, for example, gcc.dg/redecl-6.c. */ 5618 if (POINTER_TYPE_P (TREE_TYPE (*to_p))) 5619 { 5620 STRIP_USELESS_TYPE_CONVERSION (*from_p); 5621 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p))) 5622 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p); 5623 } 5624 5625 /* See if any simplifications can be done based on what the RHS is.
*/ 5626 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p, 5627 want_value); 5628 if (ret != GS_UNHANDLED) 5629 return ret; 5630 5631 /* For zero sized types only gimplify the left hand side and right hand 5632 side as statements and throw away the assignment. Do this after 5633 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable 5634 types properly. */ 5635 if (zero_sized_type (TREE_TYPE (*from_p)) 5636 && !want_value 5637 /* Don't do this for calls that return addressable types, expand_call 5638 relies on those having a lhs. */ 5639 && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p)) 5640 && TREE_CODE (*from_p) == CALL_EXPR)) 5641 { 5642 gimplify_stmt (from_p, pre_p); 5643 gimplify_stmt (to_p, pre_p); 5644 *expr_p = NULL_TREE; 5645 return GS_ALL_DONE; 5646 } 5647 5648 /* If the value being copied is of variable width, compute the length 5649 of the copy into a WITH_SIZE_EXPR. Note that we need to do this 5650 before gimplifying any of the operands so that we can resolve any 5651 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses 5652 the size of the expression to be copied, not of the destination, so 5653 that is what we must do here. */ 5654 maybe_with_size_expr (from_p); 5655 5656 /* As a special case, we have to temporarily allow for assignments 5657 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is 5658 a toplevel statement, when gimplifying the GENERIC expression 5659 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple 5660 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>. 5661 5662 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To 5663 prevent gimplify_expr from trying to create a new temporary for 5664 foo's LHS, we tell it that it should only gimplify until it 5665 reaches the CALL_EXPR. On return from gimplify_expr, the newly 5666 created GIMPLE_CALL <foo> will be the last statement in *PRE_P 5667 and all we need to do here is set 'a' to be its LHS. */ 5668 5669 /* Gimplify the RHS first for C++17 and bug 71104. */ 5670 gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p); 5671 ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue); 5672 if (ret == GS_ERROR) 5673 return ret; 5674 5675 /* Then gimplify the LHS. */ 5676 /* If we gimplified the RHS to a CALL_EXPR and that call may return 5677 twice we have to make sure to gimplify into non-SSA as otherwise 5678 the abnormal edge added later will make those defs not dominate 5679 their uses. 5680 ??? Technically this applies only to the registers used in the 5681 resulting non-register *TO_P. */ 5682 bool saved_into_ssa = gimplify_ctxp->into_ssa; 5683 if (saved_into_ssa 5684 && TREE_CODE (*from_p) == CALL_EXPR 5685 && call_expr_flags (*from_p) & ECF_RETURNS_TWICE) 5686 gimplify_ctxp->into_ssa = false; 5687 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue); 5688 gimplify_ctxp->into_ssa = saved_into_ssa; 5689 if (ret == GS_ERROR) 5690 return ret; 5691 5692 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial 5693 guess for the predicate was wrong. */ 5694 gimple_predicate final_pred = rhs_predicate_for (*to_p); 5695 if (final_pred != initial_pred) 5696 { 5697 ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue); 5698 if (ret == GS_ERROR) 5699 return ret; 5700 } 5701 5702 /* In case of a va_arg internal fn wrapped in a WITH_SIZE_EXPR, add the type 5703 size as an argument to the call.
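A sketch of the rewrite, assuming shapes rather than quoting actual tree dumps: a RHS of the form

  WITH_SIZE_EXPR <VA_ARG (ap, tag, aptag), vlasize>

has its inner IFN_VA_ARG call rebuilt with the size appended,

  WITH_SIZE_EXPR <VA_ARG (ap, tag, aptag, vlasize), vlasize>

so the variable-sized type's size survives into the va_arg expansion.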
*/ 5704 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR) 5705 { 5706 tree call = TREE_OPERAND (*from_p, 0); 5707 tree vlasize = TREE_OPERAND (*from_p, 1); 5708 5709 if (TREE_CODE (call) == CALL_EXPR 5710 && CALL_EXPR_IFN (call) == IFN_VA_ARG) 5711 { 5712 int nargs = call_expr_nargs (call); 5713 tree type = TREE_TYPE (call); 5714 tree ap = CALL_EXPR_ARG (call, 0); 5715 tree tag = CALL_EXPR_ARG (call, 1); 5716 tree aptag = CALL_EXPR_ARG (call, 2); 5717 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call), 5718 IFN_VA_ARG, type, 5719 nargs + 1, ap, tag, 5720 aptag, vlasize); 5721 TREE_OPERAND (*from_p, 0) = newcall; 5722 } 5723 } 5724 5725 /* Now see if the above changed *from_p to something we handle specially. */ 5726 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p, 5727 want_value); 5728 if (ret != GS_UNHANDLED) 5729 return ret; 5730 5731 /* If we've got a variable sized assignment between two lvalues (i.e. does 5732 not involve a call), then we can make things a bit more straightforward 5733 by converting the assignment to memcpy or memset. */ 5734 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR) 5735 { 5736 tree from = TREE_OPERAND (*from_p, 0); 5737 tree size = TREE_OPERAND (*from_p, 1); 5738 5739 if (TREE_CODE (from) == CONSTRUCTOR) 5740 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p); 5741 5742 if (is_gimple_addressable (from)) 5743 { 5744 *from_p = from; 5745 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value, 5746 pre_p); 5747 } 5748 } 5749 5750 /* Transform partial stores to non-addressable complex variables into 5751 total stores. This allows us to use real instead of virtual operands 5752 for these variables, which improves optimization. */ 5753 if ((TREE_CODE (*to_p) == REALPART_EXPR 5754 || TREE_CODE (*to_p) == IMAGPART_EXPR) 5755 && is_gimple_reg (TREE_OPERAND (*to_p, 0))) 5756 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value); 5757 5758 /* Try to alleviate the effects of the gimplification creating artificial 5759 temporaries (see for example is_gimple_reg_rhs) on the debug info, but 5760 make sure not to create DECL_DEBUG_EXPR links across functions. */ 5761 if (!gimplify_ctxp->into_ssa 5762 && VAR_P (*from_p) 5763 && DECL_IGNORED_P (*from_p) 5764 && DECL_P (*to_p) 5765 && !DECL_IGNORED_P (*to_p) 5766 && decl_function_context (*to_p) == current_function_decl 5767 && decl_function_context (*from_p) == current_function_decl) 5768 { 5769 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p)) 5770 DECL_NAME (*from_p) 5771 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p))); 5772 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1; 5773 SET_DECL_DEBUG_EXPR (*from_p, *to_p); 5774 } 5775 5776 if (want_value && TREE_THIS_VOLATILE (*to_p)) 5777 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p); 5778 5779 if (TREE_CODE (*from_p) == CALL_EXPR) 5780 { 5781 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL 5782 instead of a GIMPLE_ASSIGN. */ 5783 gcall *call_stmt; 5784 if (CALL_EXPR_FN (*from_p) == NULL_TREE) 5785 { 5786 /* Gimplify internal functions created in the FEs. 
*/ 5787 int nargs = call_expr_nargs (*from_p), i; 5788 enum internal_fn ifn = CALL_EXPR_IFN (*from_p); 5789 auto_vec<tree> vargs (nargs); 5790 5791 for (i = 0; i < nargs; i++) 5792 { 5793 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p, 5794 EXPR_LOCATION (*from_p)); 5795 vargs.quick_push (CALL_EXPR_ARG (*from_p, i)); 5796 } 5797 call_stmt = gimple_build_call_internal_vec (ifn, vargs); 5798 gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p)); 5799 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p)); 5800 } 5801 else 5802 { 5803 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p)); 5804 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0); 5805 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p)); 5806 tree fndecl = get_callee_fndecl (*from_p); 5807 if (fndecl 5808 && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT) 5809 && call_expr_nargs (*from_p) == 3) 5810 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3, 5811 CALL_EXPR_ARG (*from_p, 0), 5812 CALL_EXPR_ARG (*from_p, 1), 5813 CALL_EXPR_ARG (*from_p, 2)); 5814 else 5815 { 5816 call_stmt = gimple_build_call_from_tree (*from_p, fnptrtype); 5817 } 5818 } 5819 notice_special_calls (call_stmt); 5820 if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p)) 5821 gimple_call_set_lhs (call_stmt, *to_p); 5822 else if (TREE_CODE (*to_p) == SSA_NAME) 5823 /* The above is somewhat premature, avoid ICEing later for a 5824 SSA name w/o a definition. We may have uses in the GIMPLE IL. 5825 ??? This doesn't make it a default-def. */ 5826 SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop (); 5827 5828 assign = call_stmt; 5829 } 5830 else 5831 { 5832 assign = gimple_build_assign (*to_p, *from_p); 5833 gimple_set_location (assign, EXPR_LOCATION (*expr_p)); 5834 if (COMPARISON_CLASS_P (*from_p)) 5835 gimple_set_no_warning (assign, TREE_NO_WARNING (*from_p)); 5836 } 5837 5838 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p)) 5839 { 5840 /* We should have got an SSA name from the start. */ 5841 gcc_assert (TREE_CODE (*to_p) == SSA_NAME 5842 || ! gimple_in_ssa_p (cfun)); 5843 } 5844 5845 gimplify_seq_add_stmt (pre_p, assign); 5846 gsi = gsi_last (*pre_p); 5847 maybe_fold_stmt (&gsi); 5848 5849 if (want_value) 5850 { 5851 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p); 5852 return GS_OK; 5853 } 5854 else 5855 *expr_p = NULL; 5856 5857 return GS_ALL_DONE; 5858 } 5859 5860 /* Gimplify a comparison between two variable-sized objects. Do this 5861 with a call to BUILT_IN_MEMCMP. */ 5862 5863 static enum gimplify_status 5864 gimplify_variable_sized_compare (tree *expr_p) 5865 { 5866 location_t loc = EXPR_LOCATION (*expr_p); 5867 tree op0 = TREE_OPERAND (*expr_p, 0); 5868 tree op1 = TREE_OPERAND (*expr_p, 1); 5869 tree t, arg, dest, src, expr; 5870 5871 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0)); 5872 arg = unshare_expr (arg); 5873 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0); 5874 src = build_fold_addr_expr_loc (loc, op1); 5875 dest = build_fold_addr_expr_loc (loc, op0); 5876 t = builtin_decl_implicit (BUILT_IN_MEMCMP); 5877 t = build_call_expr_loc (loc, t, 3, dest, src, arg); 5878 5879 expr 5880 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node); 5881 SET_EXPR_LOCATION (expr, loc); 5882 *expr_p = expr; 5883 5884 return GS_OK; 5885 } 5886 5887 /* Gimplify a comparison between two aggregate objects of integral scalar 5888 mode as a comparison between the bitwise equivalent scalar values. 
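For instance (an illustrative sketch; C itself has no aggregate equality operator, but front ends such as Ada can produce these comparisons): two 4-byte records whose TYPE_MODE is an integral scalar mode compare as, in effect,

  VIEW_CONVERT_EXPR<unsigned int>(a) == VIEW_CONVERT_EXPR<unsigned int>(b)

i.e. each object is reinterpreted as one scalar word and the words are compared directly.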
*/ 5889 5890 static enum gimplify_status 5891 gimplify_scalar_mode_aggregate_compare (tree *expr_p) 5892 { 5893 location_t loc = EXPR_LOCATION (*expr_p); 5894 tree op0 = TREE_OPERAND (*expr_p, 0); 5895 tree op1 = TREE_OPERAND (*expr_p, 1); 5896 5897 tree type = TREE_TYPE (op0); 5898 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1); 5899 5900 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0); 5901 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1); 5902 5903 *expr_p 5904 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1); 5905 5906 return GS_OK; 5907 } 5908 5909 /* Gimplify an expression sequence. This function gimplifies each 5910 expression and rewrites the original expression with the last 5911 expression of the sequence in GIMPLE form. 5912 5913 PRE_P points to the list where the side effects for all the 5914 expressions in the sequence will be emitted. 5915 5916 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */ 5917 5918 static enum gimplify_status 5919 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value) 5920 { 5921 tree t = *expr_p; 5922 5923 do 5924 { 5925 tree *sub_p = &TREE_OPERAND (t, 0); 5926 5927 if (TREE_CODE (*sub_p) == COMPOUND_EXPR) 5928 gimplify_compound_expr (sub_p, pre_p, false); 5929 else 5930 gimplify_stmt (sub_p, pre_p); 5931 5932 t = TREE_OPERAND (t, 1); 5933 } 5934 while (TREE_CODE (t) == COMPOUND_EXPR); 5935 5936 *expr_p = t; 5937 if (want_value) 5938 return GS_OK; 5939 else 5940 { 5941 gimplify_stmt (expr_p, pre_p); 5942 return GS_ALL_DONE; 5943 } 5944 } 5945 5946 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to 5947 gimplify. After gimplification, EXPR_P will point to a new temporary 5948 that holds the original value of the SAVE_EXPR node. 5949 5950 PRE_P points to the list where side effects that must happen before 5951 *EXPR_P should be stored. */ 5952 5953 static enum gimplify_status 5954 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p) 5955 { 5956 enum gimplify_status ret = GS_ALL_DONE; 5957 tree val; 5958 5959 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR); 5960 val = TREE_OPERAND (*expr_p, 0); 5961 5962 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */ 5963 if (!SAVE_EXPR_RESOLVED_P (*expr_p)) 5964 { 5965 /* The operand may be a void-valued expression. It is 5966 being executed only for its side-effects. */ 5967 if (TREE_TYPE (val) == void_type_node) 5968 { 5969 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p, 5970 is_gimple_stmt, fb_none); 5971 val = NULL; 5972 } 5973 else 5974 /* The temporary may not be an SSA name as later abnormal and EH 5975 control flow may invalidate use/def domination. When in SSA 5976 form then assume there are no such issues and SAVE_EXPRs only 5977 appear via GENERIC foldings. */ 5978 val = get_initialized_tmp_var (val, pre_p, post_p, 5979 gimple_in_ssa_p (cfun)); 5980 5981 TREE_OPERAND (*expr_p, 0) = val; 5982 SAVE_EXPR_RESOLVED_P (*expr_p) = 1; 5983 } 5984 5985 *expr_p = val; 5986 5987 return ret; 5988 } 5989 5990 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P 5991 5992 unary_expr 5993 : ... 5994 | '&' varname 5995 ... 5996 5997 PRE_P points to the list where side effects that must happen before 5998 *EXPR_P should be stored. 5999 6000 POST_P points to the list where side effects that must happen after 6001 *EXPR_P should be stored. 
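Illustrative examples of the cases handled below (sketches, not actual dumps): '&*p' collapses back to plain 'p' (the INDIRECT_REF case), and taking the address of a VIEW_CONVERT_EXPR becomes a conversion of the operand's address, roughly

  &VIEW_CONVERT_EXPR<T>(x)  -->  (T *) &x

modulo cv-qualifier fix-ups.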
*/ 6002 6003 static enum gimplify_status 6004 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p) 6005 { 6006 tree expr = *expr_p; 6007 tree op0 = TREE_OPERAND (expr, 0); 6008 enum gimplify_status ret; 6009 location_t loc = EXPR_LOCATION (*expr_p); 6010 6011 switch (TREE_CODE (op0)) 6012 { 6013 case INDIRECT_REF: 6014 do_indirect_ref: 6015 /* Check if we are dealing with an expression of the form '&*ptr'. 6016 While the front end folds away '&*ptr' into 'ptr', these 6017 expressions may be generated internally by the compiler (e.g., 6018 builtins like __builtin_va_end). */ 6019 /* Caution: the silent array decomposition semantics we allow for 6020 ADDR_EXPR mean we can't always discard the pair. */ 6021 /* Gimplification of the ADDR_EXPR operand may drop 6022 cv-qualification conversions, so make sure we add them if 6023 needed. */ 6024 { 6025 tree op00 = TREE_OPERAND (op0, 0); 6026 tree t_expr = TREE_TYPE (expr); 6027 tree t_op00 = TREE_TYPE (op00); 6028 6029 if (!useless_type_conversion_p (t_expr, t_op00)) 6030 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00); 6031 *expr_p = op00; 6032 ret = GS_OK; 6033 } 6034 break; 6035 6036 case VIEW_CONVERT_EXPR: 6037 /* Take the address of our operand and then convert it to the type of 6038 this ADDR_EXPR. 6039 6040 ??? The interactions of VIEW_CONVERT_EXPR and aliasing are not at 6041 all clear. The impact of this transformation is even less clear. */ 6042 6043 /* If the operand is a useless conversion, look through it. Doing so 6044 guarantees that the ADDR_EXPR and its operand will remain of the 6045 same type. */ 6046 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0))) 6047 op0 = TREE_OPERAND (op0, 0); 6048 6049 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr), 6050 build_fold_addr_expr_loc (loc, 6051 TREE_OPERAND (op0, 0))); 6052 ret = GS_OK; 6053 break; 6054 6055 case MEM_REF: 6056 if (integer_zerop (TREE_OPERAND (op0, 1))) 6057 goto do_indirect_ref; 6058 6059 /* fall through */ 6060 6061 default: 6062 /* If we see a call to a declared builtin or see its address 6063 being taken (we can unify those cases here) then we can mark 6064 the builtin for implicit generation by GCC. */ 6065 if (TREE_CODE (op0) == FUNCTION_DECL 6066 && fndecl_built_in_p (op0, BUILT_IN_NORMAL) 6067 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0))) 6068 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true); 6069 6070 /* We use fb_either here because the C frontend sometimes takes 6071 the address of a call that returns a struct; see 6072 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make 6073 the implied temporary explicit. */ 6074 6075 /* Make the operand addressable. */ 6076 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p, 6077 is_gimple_addressable, fb_either); 6078 if (ret == GS_ERROR) 6079 break; 6080 6081 /* Then mark it. Beware that it may not be possible to do so directly 6082 if a temporary has been created by the gimplification. */ 6083 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p); 6084 6085 op0 = TREE_OPERAND (expr, 0); 6086 6087 /* For various reasons, the gimplification of the expression 6088 may have made a new INDIRECT_REF. */ 6089 if (TREE_CODE (op0) == INDIRECT_REF 6090 || (TREE_CODE (op0) == MEM_REF 6091 && integer_zerop (TREE_OPERAND (op0, 1)))) 6092 goto do_indirect_ref; 6093 6094 mark_addressable (TREE_OPERAND (expr, 0)); 6095 6096 /* The FEs may end up building ADDR_EXPRs early on a decl with 6097 an incomplete type.
Re-build ADDR_EXPRs in canonical form 6098 here. */ 6099 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr)))) 6100 *expr_p = build_fold_addr_expr (op0); 6101 6102 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */ 6103 recompute_tree_invariant_for_addr_expr (*expr_p); 6104 6105 /* If we re-built the ADDR_EXPR add a conversion to the original type 6106 if required. */ 6107 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p))) 6108 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p); 6109 6110 break; 6111 } 6112 6113 return ret; 6114 } 6115 6116 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple 6117 value; output operands should be a gimple lvalue. */ 6118 6119 static enum gimplify_status 6120 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p) 6121 { 6122 tree expr; 6123 int noutputs; 6124 const char **oconstraints; 6125 int i; 6126 tree link; 6127 const char *constraint; 6128 bool allows_mem, allows_reg, is_inout; 6129 enum gimplify_status ret, tret; 6130 gasm *stmt; 6131 vec<tree, va_gc> *inputs; 6132 vec<tree, va_gc> *outputs; 6133 vec<tree, va_gc> *clobbers; 6134 vec<tree, va_gc> *labels; 6135 tree link_next; 6136 6137 expr = *expr_p; 6138 noutputs = list_length (ASM_OUTPUTS (expr)); 6139 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *)); 6140 6141 inputs = NULL; 6142 outputs = NULL; 6143 clobbers = NULL; 6144 labels = NULL; 6145 6146 ret = GS_ALL_DONE; 6147 link_next = NULL_TREE; 6148 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next) 6149 { 6150 bool ok; 6151 size_t constraint_len; 6152 6153 link_next = TREE_CHAIN (link); 6154 6155 oconstraints[i] 6156 = constraint 6157 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link))); 6158 constraint_len = strlen (constraint); 6159 if (constraint_len == 0) 6160 continue; 6161 6162 ok = parse_output_constraint (&constraint, i, 0, 0, 6163 &allows_mem, &allows_reg, &is_inout); 6164 if (!ok) 6165 { 6166 ret = GS_ERROR; 6167 is_inout = false; 6168 } 6169 6170 /* If we can't make copies, we can only accept memory. */ 6171 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link)))) 6172 { 6173 if (allows_mem) 6174 allows_reg = 0; 6175 else 6176 { 6177 error ("impossible constraint in %<asm%>"); 6178 error ("non-memory output %d must stay in memory", i); 6179 return GS_ERROR; 6180 } 6181 } 6182 6183 if (!allows_reg && allows_mem) 6184 mark_addressable (TREE_VALUE (link)); 6185 6186 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p, 6187 is_inout ? is_gimple_min_lval : is_gimple_lvalue, 6188 fb_lvalue | fb_mayfail); 6189 if (tret == GS_ERROR) 6190 { 6191 error ("invalid lvalue in asm output %d", i); 6192 ret = tret; 6193 } 6194 6195 /* If the constraint does not allow memory make sure we gimplify 6196 it to a register if it is not already but its base is. This 6197 happens for complex and vector components. */ 6198 if (!allows_mem) 6199 { 6200 tree op = TREE_VALUE (link); 6201 if (! 
is_gimple_val (op) 6202 && is_gimple_reg_type (TREE_TYPE (op)) 6203 && is_gimple_reg (get_base_address (op))) 6204 { 6205 tree tem = create_tmp_reg (TREE_TYPE (op)); 6206 tree ass; 6207 if (is_inout) 6208 { 6209 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), 6210 tem, unshare_expr (op)); 6211 gimplify_and_add (ass, pre_p); 6212 } 6213 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem); 6214 gimplify_and_add (ass, post_p); 6215 6216 TREE_VALUE (link) = tem; 6217 tret = GS_OK; 6218 } 6219 } 6220 6221 vec_safe_push (outputs, link); 6222 TREE_CHAIN (link) = NULL_TREE; 6223 6224 if (is_inout) 6225 { 6226 /* An input/output operand. To give the optimizers more 6227 flexibility, split it into separate input and output 6228 operands. */ 6229 tree input; 6230 /* Buffer big enough to format a 32-bit UINT_MAX into. */ 6231 char buf[11]; 6232 6233 /* Turn the in/out constraint into an output constraint. */ 6234 char *p = xstrdup (constraint); 6235 p[0] = '='; 6236 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p); 6237 6238 /* And add a matching input constraint. */ 6239 if (allows_reg) 6240 { 6241 sprintf (buf, "%u", i); 6242 6243 /* If there are multiple alternatives in the constraint, 6244 handle each of them individually. Those that allow register 6245 will be replaced with operand number, the others will stay 6246 unchanged. */ 6247 if (strchr (p, ',') != NULL) 6248 { 6249 size_t len = 0, buflen = strlen (buf); 6250 char *beg, *end, *str, *dst; 6251 6252 for (beg = p + 1;;) 6253 { 6254 end = strchr (beg, ','); 6255 if (end == NULL) 6256 end = strchr (beg, '\0'); 6257 if ((size_t) (end - beg) < buflen) 6258 len += buflen + 1; 6259 else 6260 len += end - beg + 1; 6261 if (*end) 6262 beg = end + 1; 6263 else 6264 break; 6265 } 6266 6267 str = (char *) alloca (len); 6268 for (beg = p + 1, dst = str;;) 6269 { 6270 const char *tem; 6271 bool mem_p, reg_p, inout_p; 6272 6273 end = strchr (beg, ','); 6274 if (end) 6275 *end = '\0'; 6276 beg[-1] = '='; 6277 tem = beg - 1; 6278 parse_output_constraint (&tem, i, 0, 0, 6279 &mem_p, &reg_p, &inout_p); 6280 if (dst != str) 6281 *dst++ = ','; 6282 if (reg_p) 6283 { 6284 memcpy (dst, buf, buflen); 6285 dst += buflen; 6286 } 6287 else 6288 { 6289 if (end) 6290 len = end - beg; 6291 else 6292 len = strlen (beg); 6293 memcpy (dst, beg, len); 6294 dst += len; 6295 } 6296 if (end) 6297 beg = end + 1; 6298 else 6299 break; 6300 } 6301 *dst = '\0'; 6302 input = build_string (dst - str, str); 6303 } 6304 else 6305 input = build_string (strlen (buf), buf); 6306 } 6307 else 6308 input = build_string (constraint_len - 1, constraint + 1); 6309 6310 free (p); 6311 6312 input = build_tree_list (build_tree_list (NULL_TREE, input), 6313 unshare_expr (TREE_VALUE (link))); 6314 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input); 6315 } 6316 } 6317 6318 link_next = NULL_TREE; 6319 for (link = ASM_INPUTS (expr); link; ++i, link = link_next) 6320 { 6321 link_next = TREE_CHAIN (link); 6322 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link))); 6323 parse_input_constraint (&constraint, 0, 0, noutputs, 0, 6324 oconstraints, &allows_mem, &allows_reg); 6325 6326 /* If we can't make copies, we can only accept memory. */ 6327 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link)))) 6328 { 6329 if (allows_mem) 6330 allows_reg = 0; 6331 else 6332 { 6333 error ("impossible constraint in %<asm%>"); 6334 error ("non-memory input %d must stay in memory", i); 6335 return GS_ERROR; 6336 } 6337 } 6338 6339 /* If the operand is a memory input, it should be an lvalue.
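E.g. (illustrative) a '"m" (x)' input needs 'x' to stay addressable, while operands with side effects such as '"m" (x++)' are replaced by error_mark_node just below. Likewise, for the in/out splitting earlier in this function, a hypothetical

  asm ("incl %0" : "+r" (v));

becomes the output constraint '"=r" (v)' plus the matching input '"0" (v)'.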
*/ 6340 if (!allows_reg && allows_mem) 6341 { 6342 tree inputv = TREE_VALUE (link); 6343 STRIP_NOPS (inputv); 6344 if (TREE_CODE (inputv) == PREDECREMENT_EXPR 6345 || TREE_CODE (inputv) == PREINCREMENT_EXPR 6346 || TREE_CODE (inputv) == POSTDECREMENT_EXPR 6347 || TREE_CODE (inputv) == POSTINCREMENT_EXPR 6348 || TREE_CODE (inputv) == MODIFY_EXPR) 6349 TREE_VALUE (link) = error_mark_node; 6350 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p, 6351 is_gimple_lvalue, fb_lvalue | fb_mayfail); 6352 if (tret != GS_ERROR) 6353 { 6354 /* Unlike output operands, memory inputs are not guaranteed 6355 to be lvalues by the FE, and while the expressions are 6356 marked addressable there, if it is e.g. a statement 6357 expression, temporaries in it might not end up being 6358 addressable. They might be already used in the IL and thus 6359 it is too late to make them addressable now though. */ 6360 tree x = TREE_VALUE (link); 6361 while (handled_component_p (x)) 6362 x = TREE_OPERAND (x, 0); 6363 if (TREE_CODE (x) == MEM_REF 6364 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR) 6365 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0); 6366 if ((VAR_P (x) 6367 || TREE_CODE (x) == PARM_DECL 6368 || TREE_CODE (x) == RESULT_DECL) 6369 && !TREE_ADDRESSABLE (x) 6370 && is_gimple_reg (x)) 6371 { 6372 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), 6373 input_location), 0, 6374 "memory input %d is not directly addressable", 6375 i); 6376 prepare_gimple_addressable (&TREE_VALUE (link), pre_p); 6377 } 6378 } 6379 mark_addressable (TREE_VALUE (link)); 6380 if (tret == GS_ERROR) 6381 { 6382 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location), 6383 "memory input %d is not directly addressable", i); 6384 ret = tret; 6385 } 6386 } 6387 else 6388 { 6389 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p, 6390 is_gimple_asm_val, fb_rvalue); 6391 if (tret == GS_ERROR) 6392 ret = tret; 6393 } 6394 6395 TREE_CHAIN (link) = NULL_TREE; 6396 vec_safe_push (inputs, link); 6397 } 6398 6399 link_next = NULL_TREE; 6400 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next) 6401 { 6402 link_next = TREE_CHAIN (link); 6403 TREE_CHAIN (link) = NULL_TREE; 6404 vec_safe_push (clobbers, link); 6405 } 6406 6407 link_next = NULL_TREE; 6408 for (link = ASM_LABELS (expr); link; ++i, link = link_next) 6409 { 6410 link_next = TREE_CHAIN (link); 6411 TREE_CHAIN (link) = NULL_TREE; 6412 vec_safe_push (labels, link); 6413 } 6414 6415 /* Do not add ASMs with errors to the gimple IL stream. */ 6416 if (ret != GS_ERROR) 6417 { 6418 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)), 6419 inputs, outputs, clobbers, labels); 6420 6421 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0); 6422 gimple_asm_set_input (stmt, ASM_INPUT_P (expr)); 6423 gimple_asm_set_inline (stmt, ASM_INLINE_P (expr)); 6424 6425 gimplify_seq_add_stmt (pre_p, stmt); 6426 } 6427 6428 return ret; 6429 } 6430 6431 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding 6432 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while 6433 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we 6434 return to this function. 6435 6436 FIXME should we complexify the prequeue handling instead? Or use flags 6437 for all the cleanups and let the optimizer tighten them up? The current 6438 code seems pretty fragile; it will break on a cleanup within any 6439 non-conditional nesting. 
But any such nesting would be broken, anyway; 6440 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct 6441 and continues out of it. We can do that at the RTL level, though, so 6442 having an optimizer to tighten up try/finally regions would be a Good 6443 Thing. */ 6444 6445 static enum gimplify_status 6446 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p) 6447 { 6448 gimple_stmt_iterator iter; 6449 gimple_seq body_sequence = NULL; 6450 6451 tree temp = voidify_wrapper_expr (*expr_p, NULL); 6452 6453 /* We only care about the number of conditions between the innermost 6454 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and 6455 any cleanups collected outside the CLEANUP_POINT_EXPR. */ 6456 int old_conds = gimplify_ctxp->conditions; 6457 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups; 6458 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr; 6459 gimplify_ctxp->conditions = 0; 6460 gimplify_ctxp->conditional_cleanups = NULL; 6461 gimplify_ctxp->in_cleanup_point_expr = true; 6462 6463 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence); 6464 6465 gimplify_ctxp->conditions = old_conds; 6466 gimplify_ctxp->conditional_cleanups = old_cleanups; 6467 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr; 6468 6469 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); ) 6470 { 6471 gimple *wce = gsi_stmt (iter); 6472 6473 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR) 6474 { 6475 if (gsi_one_before_end_p (iter)) 6476 { 6477 /* Note that gsi_insert_seq_before and gsi_remove do not 6478 scan operands, unlike some other sequence mutators. */ 6479 if (!gimple_wce_cleanup_eh_only (wce)) 6480 gsi_insert_seq_before_without_update (&iter, 6481 gimple_wce_cleanup (wce), 6482 GSI_SAME_STMT); 6483 gsi_remove (&iter, true); 6484 break; 6485 } 6486 else 6487 { 6488 gtry *gtry; 6489 gimple_seq seq; 6490 enum gimple_try_flags kind; 6491 6492 if (gimple_wce_cleanup_eh_only (wce)) 6493 kind = GIMPLE_TRY_CATCH; 6494 else 6495 kind = GIMPLE_TRY_FINALLY; 6496 seq = gsi_split_seq_after (iter); 6497 6498 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind); 6499 /* Do not use gsi_replace here, as it may scan operands. 6500 We want to do a simple structural modification only. */ 6501 gsi_set_stmt (&iter, gtry); 6502 iter = gsi_start (gtry->eval); 6503 } 6504 } 6505 else 6506 gsi_next (&iter); 6507 } 6508 6509 gimplify_seq_add_seq (pre_p, body_sequence); 6510 if (temp) 6511 { 6512 *expr_p = temp; 6513 return GS_OK; 6514 } 6515 else 6516 { 6517 *expr_p = NULL; 6518 return GS_ALL_DONE; 6519 } 6520 } 6521 6522 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP 6523 is the cleanup action required. EH_ONLY is true if the cleanup should 6524 only be executed if an exception is thrown, not on normal exit. 6525 If FORCE_UNCOND is true perform the cleanup unconditionally; this is 6526 only valid for clobbers. */ 6527 6528 static void 6529 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p, 6530 bool force_uncond = false) 6531 { 6532 gimple *wce; 6533 gimple_seq cleanup_stmts = NULL; 6534 6535 /* Errors can result in improperly nested cleanups, which results in 6536 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */ 6537 if (seen_error ()) 6538 return; 6539 6540 if (gimple_conditional_context ()) 6541 { 6542 /* If we're in a conditional context, this is more complex.
We only 6543 want to run the cleanup if we actually ran the initialization that 6544 necessitates it, but we want to run it after the end of the 6545 conditional context. So we wrap the try/finally around the 6546 condition and use a flag to determine whether or not to actually 6547 run the destructor. Thus 6548 6549 test ? f(A()) : 0 6550 6551 becomes (approximately) 6552 6553 flag = 0; 6554 try { 6555 if (test) { A::A(temp); flag = 1; val = f(temp); } 6556 else { val = 0; } 6557 } finally { 6558 if (flag) A::~A(temp); 6559 } 6560 val 6561 */ 6562 if (force_uncond) 6563 { 6564 gimplify_stmt (&cleanup, &cleanup_stmts); 6565 wce = gimple_build_wce (cleanup_stmts); 6566 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce); 6567 } 6568 else 6569 { 6570 tree flag = create_tmp_var (boolean_type_node, "cleanup"); 6571 gassign *ffalse = gimple_build_assign (flag, boolean_false_node); 6572 gassign *ftrue = gimple_build_assign (flag, boolean_true_node); 6573 6574 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL); 6575 gimplify_stmt (&cleanup, &cleanup_stmts); 6576 wce = gimple_build_wce (cleanup_stmts); 6577 6578 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse); 6579 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce); 6580 gimplify_seq_add_stmt (pre_p, ftrue); 6581 6582 /* Because of this manipulation, and the EH edges that jump 6583 threading cannot redirect, the temporary (VAR) will appear 6584 to be used uninitialized. Don't warn. */ 6585 TREE_NO_WARNING (var) = 1; 6586 } 6587 } 6588 else 6589 { 6590 gimplify_stmt (&cleanup, &cleanup_stmts); 6591 wce = gimple_build_wce (cleanup_stmts); 6592 gimple_wce_set_cleanup_eh_only (wce, eh_only); 6593 gimplify_seq_add_stmt (pre_p, wce); 6594 } 6595 } 6596 6597 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */ 6598 6599 static enum gimplify_status 6600 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p) 6601 { 6602 tree targ = *expr_p; 6603 tree temp = TARGET_EXPR_SLOT (targ); 6604 tree init = TARGET_EXPR_INITIAL (targ); 6605 enum gimplify_status ret; 6606 6607 bool unpoison_empty_seq = false; 6608 gimple_stmt_iterator unpoison_it; 6609 6610 if (init) 6611 { 6612 tree cleanup = NULL_TREE; 6613 6614 /* TARGET_EXPR temps aren't part of the enclosing block, so add the temp 6615 to the temps list. Also handle variable-length TARGET_EXPRs. */ 6616 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST) 6617 { 6618 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp))) 6619 gimplify_type_sizes (TREE_TYPE (temp), pre_p); 6620 gimplify_vla_decl (temp, pre_p); 6621 } 6622 else 6623 { 6624 /* Save the location where we need to place unpoisoning. It's possible 6625 that a variable will be converted to needs_to_live_in_memory. */ 6626 unpoison_it = gsi_last (*pre_p); 6627 unpoison_empty_seq = gsi_end_p (unpoison_it); 6628 6629 gimple_add_tmp_var (temp); 6630 } 6631 6632 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the 6633 expression is supposed to initialize the slot. */ 6634 if (VOID_TYPE_P (TREE_TYPE (init))) 6635 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none); 6636 else 6637 { 6638 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init); 6639 init = init_expr; 6640 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none); 6641 init = NULL; 6642 ggc_free (init_expr); 6643 } 6644 if (ret == GS_ERROR) 6645 { 6646 /* PR c++/28266 Make sure this is expanded only once.
*/ 6647 TARGET_EXPR_INITIAL (targ) = NULL_TREE; 6648 return GS_ERROR; 6649 } 6650 if (init) 6651 gimplify_and_add (init, pre_p); 6652 6653 /* If needed, push the cleanup for the temp. */ 6654 if (TARGET_EXPR_CLEANUP (targ)) 6655 { 6656 if (CLEANUP_EH_ONLY (targ)) 6657 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ), 6658 CLEANUP_EH_ONLY (targ), pre_p); 6659 else 6660 cleanup = TARGET_EXPR_CLEANUP (targ); 6661 } 6662 6663 /* Add a clobber for the temporary going out of scope, like 6664 gimplify_bind_expr. */ 6665 if (gimplify_ctxp->in_cleanup_point_expr 6666 && needs_to_live_in_memory (temp)) 6667 { 6668 if (flag_stack_reuse == SR_ALL) 6669 { 6670 tree clobber = build_clobber (TREE_TYPE (temp)); 6671 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber); 6672 gimple_push_cleanup (temp, clobber, false, pre_p, true); 6673 } 6674 if (asan_poisoned_variables 6675 && DECL_ALIGN (temp) <= MAX_SUPPORTED_STACK_ALIGNMENT 6676 && !TREE_STATIC (temp) 6677 && dbg_cnt (asan_use_after_scope) 6678 && !gimplify_omp_ctxp) 6679 { 6680 tree asan_cleanup = build_asan_poison_call_expr (temp); 6681 if (asan_cleanup) 6682 { 6683 if (unpoison_empty_seq) 6684 unpoison_it = gsi_start (*pre_p); 6685 6686 asan_poison_variable (temp, false, &unpoison_it, 6687 unpoison_empty_seq); 6688 gimple_push_cleanup (temp, asan_cleanup, false, pre_p); 6689 } 6690 } 6691 } 6692 if (cleanup) 6693 gimple_push_cleanup (temp, cleanup, false, pre_p); 6694 6695 /* Only expand this once. */ 6696 TREE_OPERAND (targ, 3) = init; 6697 TARGET_EXPR_INITIAL (targ) = NULL_TREE; 6698 } 6699 else 6700 /* We should have expanded this before. */ 6701 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp)); 6702 6703 *expr_p = temp; 6704 return GS_OK; 6705 } 6706 6707 /* Gimplification of expression trees. */ 6708 6709 /* Gimplify an expression which appears in statement context. The 6710 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is 6711 NULL, a new sequence is allocated. 6712 6713 Return true if we actually added a statement to the queue. */ 6714 6715 bool 6716 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p) 6717 { 6718 gimple_seq_node last; 6719 6720 last = gimple_seq_last (*seq_p); 6721 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none); 6722 return last != gimple_seq_last (*seq_p); 6723 } 6724 6725 /* Add FIRSTPRIVATE entries for DECL in the surrounding OpenMP parallels 6726 to CTX. If entries already exist, force them to be some flavor of private. 6727 If there is no enclosing parallel, do nothing.
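An illustrative (hypothetical) case: for a VLA such as 'char buf[n]', DECL_SIZE (buf) refers to a gimplified size temporary; when 'buf' is privatized on an enclosing parallel, that temporary must be entered here as firstprivate so each thread can lay out its own copy.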
*/ 6728 6729 void 6730 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl) 6731 { 6732 splay_tree_node n; 6733 6734 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE) 6735 return; 6736 6737 do 6738 { 6739 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl); 6740 if (n != NULL) 6741 { 6742 if (n->value & GOVD_SHARED) 6743 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN); 6744 else if (n->value & GOVD_MAP) 6745 n->value |= GOVD_MAP_TO_ONLY; 6746 else 6747 return; 6748 } 6749 else if ((ctx->region_type & ORT_TARGET) != 0) 6750 { 6751 if (ctx->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE) 6752 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE); 6753 else 6754 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY); 6755 } 6756 else if (ctx->region_type != ORT_WORKSHARE 6757 && ctx->region_type != ORT_TASKGROUP 6758 && ctx->region_type != ORT_SIMD 6759 && ctx->region_type != ORT_ACC 6760 && !(ctx->region_type & ORT_TARGET_DATA)) 6761 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE); 6762 6763 ctx = ctx->outer_context; 6764 } 6765 while (ctx); 6766 } 6767 6768 /* Similarly for each of the type sizes of TYPE. */ 6769 6770 static void 6771 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type) 6772 { 6773 if (type == NULL || type == error_mark_node) 6774 return; 6775 type = TYPE_MAIN_VARIANT (type); 6776 6777 if (ctx->privatized_types->add (type)) 6778 return; 6779 6780 switch (TREE_CODE (type)) 6781 { 6782 case INTEGER_TYPE: 6783 case ENUMERAL_TYPE: 6784 case BOOLEAN_TYPE: 6785 case REAL_TYPE: 6786 case FIXED_POINT_TYPE: 6787 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type)); 6788 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type)); 6789 break; 6790 6791 case ARRAY_TYPE: 6792 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type)); 6793 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type)); 6794 break; 6795 6796 case RECORD_TYPE: 6797 case UNION_TYPE: 6798 case QUAL_UNION_TYPE: 6799 { 6800 tree field; 6801 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field)) 6802 if (TREE_CODE (field) == FIELD_DECL) 6803 { 6804 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field)); 6805 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field)); 6806 } 6807 } 6808 break; 6809 6810 case POINTER_TYPE: 6811 case REFERENCE_TYPE: 6812 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type)); 6813 break; 6814 6815 default: 6816 break; 6817 } 6818 6819 omp_firstprivatize_variable (ctx, TYPE_SIZE (type)); 6820 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type)); 6821 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type); 6822 } 6823 6824 /* Add an entry for DECL in the OMP context CTX with FLAGS. */ 6825 6826 static void 6827 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags) 6828 { 6829 splay_tree_node n; 6830 unsigned int nflags; 6831 tree t; 6832 6833 if (error_operand_p (decl) || ctx->region_type == ORT_NONE) 6834 return; 6835 6836 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means 6837 there are constructors involved somewhere. Exception is a shared clause, 6838 there is nothing privatized in that case. 
*/ 6839 if ((flags & GOVD_SHARED) == 0 6840 && (TREE_ADDRESSABLE (TREE_TYPE (decl)) 6841 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))) 6842 flags |= GOVD_SEEN; 6843 6844 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl); 6845 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0) 6846 { 6847 /* We shouldn't be re-adding the decl with the same data 6848 sharing class. */ 6849 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0); 6850 nflags = n->value | flags; 6851 /* The only combination of data sharing classes we should see is 6852 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits 6853 reduction variables to be used in data sharing clauses. */ 6854 gcc_assert ((ctx->region_type & ORT_ACC) != 0 6855 || ((nflags & GOVD_DATA_SHARE_CLASS) 6856 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE)) 6857 || (flags & GOVD_DATA_SHARE_CLASS) == 0); 6858 n->value = nflags; 6859 return; 6860 } 6861 6862 /* When adding a variable-sized variable, we have to handle all sorts 6863 of additional bits of data: the pointer replacement variable, and 6864 the parameters of the type. */ 6865 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST) 6866 { 6867 /* Add the pointer replacement variable as PRIVATE if the variable 6868 replacement is private, else FIRSTPRIVATE since we'll need the 6869 address of the original variable either for SHARED, or for the 6870 copy into or out of the context. */ 6871 if (!(flags & GOVD_LOCAL) && ctx->region_type != ORT_TASKGROUP) 6872 { 6873 if (flags & GOVD_MAP) 6874 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT; 6875 else if (flags & GOVD_PRIVATE) 6876 nflags = GOVD_PRIVATE; 6877 else if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0 6878 && (flags & GOVD_FIRSTPRIVATE)) 6879 nflags = GOVD_PRIVATE | GOVD_EXPLICIT; 6880 else 6881 nflags = GOVD_FIRSTPRIVATE; 6882 nflags |= flags & GOVD_SEEN; 6883 t = DECL_VALUE_EXPR (decl); 6884 gcc_assert (TREE_CODE (t) == INDIRECT_REF); 6885 t = TREE_OPERAND (t, 0); 6886 gcc_assert (DECL_P (t)); 6887 omp_add_variable (ctx, t, nflags); 6888 } 6889 6890 /* Add all of the variable and type parameters (which should have 6891 been gimplified to a formal temporary) as FIRSTPRIVATE. */ 6892 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl)); 6893 omp_firstprivatize_variable (ctx, DECL_SIZE (decl)); 6894 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl)); 6895 6896 /* The variable-sized variable itself is never SHARED, only some form 6897 of PRIVATE. The sharing would take place via the pointer variable 6898 which we remapped above. */ 6899 if (flags & GOVD_SHARED) 6900 flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE 6901 | (flags & (GOVD_SEEN | GOVD_EXPLICIT)); 6902 6903 /* We're going to make use of the TYPE_SIZE_UNIT at least in the 6904 alloca statement we generate for the variable, so make sure it 6905 is available. This isn't automatically needed for the SHARED 6906 case, since we won't be allocating local storage then. 6907 For local variables TYPE_SIZE_UNIT might not be gimplified yet, 6908 in this case omp_notice_variable will be called later 6909 on when it is gimplified. */ 6910 else if (! 
(flags & (GOVD_LOCAL | GOVD_MAP)) 6911 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl)))) 6912 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true); 6913 } 6914 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0 6915 && lang_hooks.decls.omp_privatize_by_reference (decl)) 6916 { 6917 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl)); 6918 6919 /* Similar to the direct variable sized case above, we'll need the 6920 size of references being privatized. */ 6921 if ((flags & GOVD_SHARED) == 0) 6922 { 6923 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))); 6924 if (DECL_P (t)) 6925 omp_notice_variable (ctx, t, true); 6926 } 6927 } 6928 6929 if (n != NULL) 6930 n->value |= flags; 6931 else 6932 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags); 6933 6934 /* For reductions clauses in OpenACC loop directives, by default create a 6935 copy clause on the enclosing parallel construct for carrying back the 6936 results. */ 6937 if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION)) 6938 { 6939 struct gimplify_omp_ctx *outer_ctx = ctx->outer_context; 6940 while (outer_ctx) 6941 { 6942 n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl); 6943 if (n != NULL) 6944 { 6945 /* Ignore local variables and explicitly declared clauses. */ 6946 if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT)) 6947 break; 6948 else if (outer_ctx->region_type == ORT_ACC_KERNELS) 6949 { 6950 /* According to the OpenACC spec, such a reduction variable 6951 should already have a copy map on a kernels construct, 6952 verify that here. */ 6953 gcc_assert (!(n->value & GOVD_FIRSTPRIVATE) 6954 && (n->value & GOVD_MAP)); 6955 } 6956 else if (outer_ctx->region_type == ORT_ACC_PARALLEL) 6957 { 6958 /* Remove firstprivate and make it a copy map. */ 6959 n->value &= ~GOVD_FIRSTPRIVATE; 6960 n->value |= GOVD_MAP; 6961 } 6962 } 6963 else if (outer_ctx->region_type == ORT_ACC_PARALLEL) 6964 { 6965 splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl, 6966 GOVD_MAP | GOVD_SEEN); 6967 break; 6968 } 6969 outer_ctx = outer_ctx->outer_context; 6970 } 6971 } 6972 } 6973 6974 /* Notice a threadprivate variable DECL used in OMP context CTX. 6975 This just prints out diagnostics about threadprivate variable uses 6976 in untied tasks. If DECL2 is non-NULL, prevent this warning 6977 on that variable. 
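An illustrative example of the untied-task diagnostic (hypothetical code):

  int tp;
  #pragma omp threadprivate (tp)
  ...
  #pragma omp task untied
    tp++;

is rejected below, since an untied task may resume on a different thread from the one whose copy of 'tp' it started with; uses inside target regions are diagnosed by the loop at the top of the function.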
*/ 6978 6979 static bool 6980 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl, 6981 tree decl2) 6982 { 6983 splay_tree_node n; 6984 struct gimplify_omp_ctx *octx; 6985 6986 for (octx = ctx; octx; octx = octx->outer_context) 6987 if ((octx->region_type & ORT_TARGET) != 0) 6988 { 6989 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl); 6990 if (n == NULL) 6991 { 6992 error ("threadprivate variable %qE used in target region", 6993 DECL_NAME (decl)); 6994 error_at (octx->location, "enclosing target region"); 6995 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0); 6996 } 6997 if (decl2) 6998 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0); 6999 } 7000 7001 if (ctx->region_type != ORT_UNTIED_TASK) 7002 return false; 7003 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl); 7004 if (n == NULL) 7005 { 7006 error ("threadprivate variable %qE used in untied task", 7007 DECL_NAME (decl)); 7008 error_at (ctx->location, "enclosing task"); 7009 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0); 7010 } 7011 if (decl2) 7012 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0); 7013 return false; 7014 } 7015 7016 /* Return true if global var DECL is device resident. */ 7017 7018 static bool 7019 device_resident_p (tree decl) 7020 { 7021 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl)); 7022 7023 if (!attr) 7024 return false; 7025 7026 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t)) 7027 { 7028 tree c = TREE_VALUE (t); 7029 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT) 7030 return true; 7031 } 7032 7033 return false; 7034 } 7035 7036 /* Return true if DECL has an ACC DECLARE attribute. */ 7037 7038 static bool 7039 is_oacc_declared (tree decl) 7040 { 7041 tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl; 7042 tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t)); 7043 return declared != NULL_TREE; 7044 } 7045 7046 /* Determine outer default flags for DECL mentioned in an OMP region 7047 but not declared in an enclosing clause. 7048 7049 ??? Some compiler-generated variables (like SAVE_EXPRs) could be 7050 remapped firstprivate instead of shared. To some extent this is 7051 addressed in omp_firstprivatize_type_sizes, but not 7052 effectively. 
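For the default(none) case handled first in the switch below, an illustrative example:

  int v = 0;
  #pragma omp parallel default(none)
    v++;

draws the "not specified in enclosing 'parallel'" error; with no default clause at all, 'v' would instead fall back to GOVD_SHARED here.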
*/ 7053 7054 static unsigned 7055 omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl, 7056 bool in_code, unsigned flags) 7057 { 7058 enum omp_clause_default_kind default_kind = ctx->default_kind; 7059 enum omp_clause_default_kind kind; 7060 7061 kind = lang_hooks.decls.omp_predetermined_sharing (decl); 7062 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED) 7063 default_kind = kind; 7064 else if (VAR_P (decl) && TREE_STATIC (decl) && DECL_IN_CONSTANT_POOL (decl)) 7065 default_kind = OMP_CLAUSE_DEFAULT_SHARED; 7066 7067 switch (default_kind) 7068 { 7069 case OMP_CLAUSE_DEFAULT_NONE: 7070 { 7071 const char *rtype; 7072 7073 if (ctx->region_type & ORT_PARALLEL) 7074 rtype = "parallel"; 7075 else if ((ctx->region_type & ORT_TASKLOOP) == ORT_TASKLOOP) 7076 rtype = "taskloop"; 7077 else if (ctx->region_type & ORT_TASK) 7078 rtype = "task"; 7079 else if (ctx->region_type & ORT_TEAMS) 7080 rtype = "teams"; 7081 else 7082 gcc_unreachable (); 7083 7084 error ("%qE not specified in enclosing %qs", 7085 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype); 7086 error_at (ctx->location, "enclosing %qs", rtype); 7087 } 7088 /* FALLTHRU */ 7089 case OMP_CLAUSE_DEFAULT_SHARED: 7090 flags |= GOVD_SHARED; 7091 break; 7092 case OMP_CLAUSE_DEFAULT_PRIVATE: 7093 flags |= GOVD_PRIVATE; 7094 break; 7095 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE: 7096 flags |= GOVD_FIRSTPRIVATE; 7097 break; 7098 case OMP_CLAUSE_DEFAULT_UNSPECIFIED: 7099 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */ 7100 gcc_assert ((ctx->region_type & ORT_TASK) != 0); 7101 if (struct gimplify_omp_ctx *octx = ctx->outer_context) 7102 { 7103 omp_notice_variable (octx, decl, in_code); 7104 for (; octx; octx = octx->outer_context) 7105 { 7106 splay_tree_node n2; 7107 7108 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl); 7109 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0 7110 && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0)) 7111 continue; 7112 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED) 7113 { 7114 flags |= GOVD_FIRSTPRIVATE; 7115 goto found_outer; 7116 } 7117 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0) 7118 { 7119 flags |= GOVD_SHARED; 7120 goto found_outer; 7121 } 7122 } 7123 } 7124 7125 if (TREE_CODE (decl) == PARM_DECL 7126 || (!is_global_var (decl) 7127 && DECL_CONTEXT (decl) == current_function_decl)) 7128 flags |= GOVD_FIRSTPRIVATE; 7129 else 7130 flags |= GOVD_SHARED; 7131 found_outer: 7132 break; 7133 7134 default: 7135 gcc_unreachable (); 7136 } 7137 7138 return flags; 7139 } 7140 7141 7142 /* Determine outer default flags for DECL mentioned in an OACC region 7143 but not declared in an enclosing clause. */ 7144 7145 static unsigned 7146 oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags) 7147 { 7148 const char *rkind; 7149 bool on_device = false; 7150 bool declared = is_oacc_declared (decl); 7151 tree type = TREE_TYPE (decl); 7152 7153 if (lang_hooks.decls.omp_privatize_by_reference (decl)) 7154 type = TREE_TYPE (type); 7155 7156 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0 7157 && is_global_var (decl) 7158 && device_resident_p (decl)) 7159 { 7160 on_device = true; 7161 flags |= GOVD_MAP_TO_ONLY; 7162 } 7163 7164 switch (ctx->region_type) 7165 { 7166 case ORT_ACC_KERNELS: 7167 rkind = "kernels"; 7168 7169 if (AGGREGATE_TYPE_P (type)) 7170 { 7171 /* Aggregates default to 'present_or_copy', or 'present'. 
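That is, reading the flag combinations below as an illustrative summary: without 'default(present)' an aggregate used in a kernels region gets a plain GOVD_MAP (a 'present_or_copy'-style mapping), while 'default(present)' adds GOVD_MAP_FORCE_PRESENT so the data must already be on the device; scalars instead receive GOVD_MAP | GOVD_MAP_FORCE, i.e. a forced 'copy'.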
*/ 7172 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT) 7173 flags |= GOVD_MAP; 7174 else 7175 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT; 7176 } 7177 else 7178 /* Scalars default to 'copy'. */ 7179 flags |= GOVD_MAP | GOVD_MAP_FORCE; 7180 7181 break; 7182 7183 case ORT_ACC_PARALLEL: 7184 rkind = "parallel"; 7185 7186 if (on_device || declared) 7187 flags |= GOVD_MAP; 7188 else if (AGGREGATE_TYPE_P (type)) 7189 { 7190 /* Aggregates default to 'present_or_copy', or 'present'. */ 7191 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT) 7192 flags |= GOVD_MAP; 7193 else 7194 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT; 7195 } 7196 else 7197 /* Scalars default to 'firstprivate'. */ 7198 flags |= GOVD_FIRSTPRIVATE; 7199 7200 break; 7201 7202 default: 7203 gcc_unreachable (); 7204 } 7205 7206 if (DECL_ARTIFICIAL (decl)) 7207 ; /* We can get compiler-generated decls, and should not complain 7208 about them. */ 7209 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE) 7210 { 7211 error ("%qE not specified in enclosing OpenACC %qs construct", 7212 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind); 7213 inform (ctx->location, "enclosing OpenACC %qs construct", rkind); 7214 } 7215 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_PRESENT) 7216 ; /* Handled above. */ 7217 else 7218 gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED); 7219 7220 return flags; 7221 } 7222 7223 /* Record the fact that DECL was used within the OMP context CTX. 7224 IN_CODE is true when real code uses DECL, and false when we should 7225 merely emit default(none) errors. Return true if DECL is going to 7226 be remapped and thus DECL shouldn't be gimplified into its 7227 DECL_VALUE_EXPR (if any). */ 7228 7229 static bool 7230 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code) 7231 { 7232 splay_tree_node n; 7233 unsigned flags = in_code ? GOVD_SEEN : 0; 7234 bool ret = false, shared; 7235 7236 if (error_operand_p (decl)) 7237 return false; 7238 7239 if (ctx->region_type == ORT_NONE) 7240 return lang_hooks.decls.omp_disregard_value_expr (decl, false); 7241 7242 if (is_global_var (decl)) 7243 { 7244 /* Threadprivate variables are predetermined. 
*/ 7245 if (DECL_THREAD_LOCAL_P (decl)) 7246 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE); 7247 7248 if (DECL_HAS_VALUE_EXPR_P (decl)) 7249 { 7250 tree value = get_base_address (DECL_VALUE_EXPR (decl)); 7251 7252 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value)) 7253 return omp_notice_threadprivate_variable (ctx, decl, value); 7254 } 7255 7256 if (gimplify_omp_ctxp->outer_context == NULL 7257 && VAR_P (decl) 7258 && oacc_get_fn_attrib (current_function_decl)) 7259 { 7260 location_t loc = DECL_SOURCE_LOCATION (decl); 7261 7262 if (lookup_attribute ("omp declare target link", 7263 DECL_ATTRIBUTES (decl))) 7264 { 7265 error_at (loc, 7266 "%qE with %<link%> clause used in %<routine%> function", 7267 DECL_NAME (decl)); 7268 return false; 7269 } 7270 else if (!lookup_attribute ("omp declare target", 7271 DECL_ATTRIBUTES (decl))) 7272 { 7273 error_at (loc, 7274 "%qE requires a %<declare%> directive for use " 7275 "in a %<routine%> function", DECL_NAME (decl)); 7276 return false; 7277 } 7278 } 7279 } 7280 7281 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl); 7282 if ((ctx->region_type & ORT_TARGET) != 0) 7283 { 7284 ret = lang_hooks.decls.omp_disregard_value_expr (decl, true); 7285 if (n == NULL) 7286 { 7287 unsigned nflags = flags; 7288 if ((ctx->region_type & ORT_ACC) == 0) 7289 { 7290 bool is_declare_target = false; 7291 if (is_global_var (decl) 7292 && varpool_node::get_create (decl)->offloadable) 7293 { 7294 struct gimplify_omp_ctx *octx; 7295 for (octx = ctx->outer_context; 7296 octx; octx = octx->outer_context) 7297 { 7298 n = splay_tree_lookup (octx->variables, 7299 (splay_tree_key)decl); 7300 if (n 7301 && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED 7302 && (n->value & GOVD_DATA_SHARE_CLASS) != 0) 7303 break; 7304 } 7305 is_declare_target = octx == NULL; 7306 } 7307 if (!is_declare_target) 7308 { 7309 int gdmk; 7310 if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE 7311 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE 7312 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) 7313 == POINTER_TYPE))) 7314 gdmk = GDMK_POINTER; 7315 else if (lang_hooks.decls.omp_scalar_p (decl)) 7316 gdmk = GDMK_SCALAR; 7317 else 7318 gdmk = GDMK_AGGREGATE; 7319 if (ctx->defaultmap[gdmk] == 0) 7320 { 7321 tree d = lang_hooks.decls.omp_report_decl (decl); 7322 error ("%qE not specified in enclosing %<target%>", 7323 DECL_NAME (d)); 7324 error_at (ctx->location, "enclosing %<target%>"); 7325 } 7326 else if (ctx->defaultmap[gdmk] 7327 & (GOVD_MAP_0LEN_ARRAY | GOVD_FIRSTPRIVATE)) 7328 nflags |= ctx->defaultmap[gdmk]; 7329 else 7330 { 7331 gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP); 7332 nflags |= ctx->defaultmap[gdmk] & ~GOVD_MAP; 7333 } 7334 } 7335 } 7336 7337 struct gimplify_omp_ctx *octx = ctx->outer_context; 7338 if ((ctx->region_type & ORT_ACC) && octx) 7339 { 7340 /* Look in outer OpenACC contexts, to see if there's a 7341 data attribute for this variable. 
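E.g. (an illustrative sketch):

  #pragma acc data copy(a)
  {
    #pragma acc parallel
      use (a);
  }

the reference to 'a' inside the parallel finds the entry recorded for the enclosing data construct and inherits a GOVD_MAP mapping from it, instead of computing a fresh default.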
*/ 7342 omp_notice_variable (octx, decl, in_code); 7343 7344 for (; octx; octx = octx->outer_context) 7345 { 7346 if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET))) 7347 break; 7348 splay_tree_node n2 7349 = splay_tree_lookup (octx->variables, 7350 (splay_tree_key) decl); 7351 if (n2) 7352 { 7353 if (octx->region_type == ORT_ACC_HOST_DATA) 7354 error ("variable %qE declared in enclosing " 7355 "%<host_data%> region", DECL_NAME (decl)); 7356 nflags |= GOVD_MAP; 7357 if (octx->region_type == ORT_ACC_DATA 7358 && (n2->value & GOVD_MAP_0LEN_ARRAY)) 7359 nflags |= GOVD_MAP_0LEN_ARRAY; 7360 goto found_outer; 7361 } 7362 } 7363 } 7364 7365 if ((nflags & ~(GOVD_MAP_TO_ONLY | GOVD_MAP_FROM_ONLY 7366 | GOVD_MAP_ALLOC_ONLY)) == flags) 7367 { 7368 tree type = TREE_TYPE (decl); 7369 7370 if (gimplify_omp_ctxp->target_firstprivatize_array_bases 7371 && lang_hooks.decls.omp_privatize_by_reference (decl)) 7372 type = TREE_TYPE (type); 7373 if (!lang_hooks.types.omp_mappable_type (type)) 7374 { 7375 error ("%qD referenced in target region does not have " 7376 "a mappable type", decl); 7377 nflags |= GOVD_MAP | GOVD_EXPLICIT; 7378 } 7379 else 7380 { 7381 if ((ctx->region_type & ORT_ACC) != 0) 7382 nflags = oacc_default_clause (ctx, decl, flags); 7383 else 7384 nflags |= GOVD_MAP; 7385 } 7386 } 7387 found_outer: 7388 omp_add_variable (ctx, decl, nflags); 7389 } 7390 else 7391 { 7392 /* If nothing changed, there's nothing left to do. */ 7393 if ((n->value & flags) == flags) 7394 return ret; 7395 flags |= n->value; 7396 n->value = flags; 7397 } 7398 goto do_outer; 7399 } 7400 7401 if (n == NULL) 7402 { 7403 if (ctx->region_type == ORT_WORKSHARE 7404 || ctx->region_type == ORT_TASKGROUP 7405 || ctx->region_type == ORT_SIMD 7406 || ctx->region_type == ORT_ACC 7407 || (ctx->region_type & ORT_TARGET_DATA) != 0) 7408 goto do_outer; 7409 7410 flags = omp_default_clause (ctx, decl, in_code, flags); 7411 7412 if ((flags & GOVD_PRIVATE) 7413 && lang_hooks.decls.omp_private_outer_ref (decl)) 7414 flags |= GOVD_PRIVATE_OUTER_REF; 7415 7416 omp_add_variable (ctx, decl, flags); 7417 7418 shared = (flags & GOVD_SHARED) != 0; 7419 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared); 7420 goto do_outer; 7421 } 7422 7423 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0 7424 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN 7425 && DECL_SIZE (decl)) 7426 { 7427 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST) 7428 { 7429 splay_tree_node n2; 7430 tree t = DECL_VALUE_EXPR (decl); 7431 gcc_assert (TREE_CODE (t) == INDIRECT_REF); 7432 t = TREE_OPERAND (t, 0); 7433 gcc_assert (DECL_P (t)); 7434 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t); 7435 n2->value |= GOVD_SEEN; 7436 } 7437 else if (lang_hooks.decls.omp_privatize_by_reference (decl) 7438 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))) 7439 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))) 7440 != INTEGER_CST)) 7441 { 7442 splay_tree_node n2; 7443 tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))); 7444 gcc_assert (DECL_P (t)); 7445 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t); 7446 if (n2) 7447 omp_notice_variable (ctx, t, true); 7448 } 7449 } 7450 7451 shared = ((flags | n->value) & GOVD_SHARED) != 0; 7452 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared); 7453 7454 /* If nothing changed, there's nothing left to do. 
*/ 7455 if ((n->value & flags) == flags) 7456 return ret; 7457 flags |= n->value; 7458 n->value = flags; 7459 7460 do_outer: 7461 /* If the variable is private in the current context, then we don't 7462 need to propagate anything to an outer context. */ 7463 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF)) 7464 return ret; 7465 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER)) 7466 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER)) 7467 return ret; 7468 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE 7469 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER)) 7470 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER)) 7471 return ret; 7472 if (ctx->outer_context 7473 && omp_notice_variable (ctx->outer_context, decl, in_code)) 7474 return true; 7475 return ret; 7476 } 7477 7478 /* Verify that DECL is private within CTX. If there's specific information 7479 to the contrary in the innermost scope, generate an error. */ 7480 7481 static bool 7482 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd) 7483 { 7484 splay_tree_node n; 7485 7486 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl); 7487 if (n != NULL) 7488 { 7489 if (n->value & GOVD_SHARED) 7490 { 7491 if (ctx == gimplify_omp_ctxp) 7492 { 7493 if (simd) 7494 error ("iteration variable %qE is predetermined linear", 7495 DECL_NAME (decl)); 7496 else 7497 error ("iteration variable %qE should be private", 7498 DECL_NAME (decl)); 7499 n->value = GOVD_PRIVATE; 7500 return true; 7501 } 7502 else 7503 return false; 7504 } 7505 else if ((n->value & GOVD_EXPLICIT) != 0 7506 && (ctx == gimplify_omp_ctxp 7507 || (ctx->region_type == ORT_COMBINED_PARALLEL 7508 && gimplify_omp_ctxp->outer_context == ctx))) 7509 { 7510 if ((n->value & GOVD_FIRSTPRIVATE) != 0) 7511 error ("iteration variable %qE should not be firstprivate", 7512 DECL_NAME (decl)); 7513 else if ((n->value & GOVD_REDUCTION) != 0) 7514 error ("iteration variable %qE should not be reduction", 7515 DECL_NAME (decl)); 7516 else if (simd != 1 && (n->value & GOVD_LINEAR) != 0) 7517 error ("iteration variable %qE should not be linear", 7518 DECL_NAME (decl)); 7519 } 7520 return (ctx == gimplify_omp_ctxp 7521 || (ctx->region_type == ORT_COMBINED_PARALLEL 7522 && gimplify_omp_ctxp->outer_context == ctx)); 7523 } 7524 7525 if (ctx->region_type != ORT_WORKSHARE 7526 && ctx->region_type != ORT_TASKGROUP 7527 && ctx->region_type != ORT_SIMD 7528 && ctx->region_type != ORT_ACC) 7529 return false; 7530 else if (ctx->outer_context) 7531 return omp_is_private (ctx->outer_context, decl, simd); 7532 return false; 7533 } 7534 7535 /* Return true if DECL is private within a parallel region 7536 that binds to the current construct's context, or appears in such 7537 a parallel region's REDUCTION clause. */ 7538 7539 static bool 7540 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate) 7541 { 7542 splay_tree_node n; 7543 7544 do 7545 { 7546 ctx = ctx->outer_context; 7547 if (ctx == NULL) 7548 { 7549 if (is_global_var (decl)) 7550 return false; 7551 7552 /* References might be private, but they might be shared too; 7553 when checking for copyprivate, assume they might be 7554 private, otherwise assume they might be shared. */ 7555 if (copyprivate) 7556 return true; 7557 7558 if (lang_hooks.decls.omp_privatize_by_reference (decl)) 7559 return false; 7560 7561 /* Treat C++ privatized non-static data members outside 7562 of the privatization the same.
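   (Such members reach this point as the artificial dummy variables
   standing for this->member accesses; omp_member_access_dummy_var
   recognizes those.)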
*/ 7563 if (omp_member_access_dummy_var (decl)) 7564 return false; 7565 7566 return true; 7567 } 7568 7569 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl); 7570 7571 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0 7572 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)) 7573 continue; 7574 7575 if (n != NULL) 7576 { 7577 if ((n->value & GOVD_LOCAL) != 0 7578 && omp_member_access_dummy_var (decl)) 7579 return false; 7580 return (n->value & GOVD_SHARED) == 0; 7581 } 7582 } 7583 while (ctx->region_type == ORT_WORKSHARE 7584 || ctx->region_type == ORT_TASKGROUP 7585 || ctx->region_type == ORT_SIMD 7586 || ctx->region_type == ORT_ACC); 7587 return false; 7588 } 7589 7590 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */ 7591 7592 static tree 7593 find_decl_expr (tree *tp, int *walk_subtrees, void *data) 7594 { 7595 tree t = *tp; 7596 7597 /* If this node is the DECL_EXPR for the decl we are looking for, 
return it. */ 7598 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data) 7599 return t; 7600 7601 if (IS_TYPE_OR_DECL_P (t)) 7602 *walk_subtrees = 0; 7603 return NULL_TREE; 7604 } 7605 7606 /* If *LIST_P contains any OpenMP depend clauses with iterators, 7607 lower all the depend clauses by populating the corresponding depend 7608 array. Returns 0 if there are no such depend clauses; 1 if the 7609 clauses were lowered and should be kept; 2 if all depend clauses should be removed. */ 7610 7611 static int 7612 gimplify_omp_depend (tree *list_p, gimple_seq *pre_p) 7613 { 7614 tree c; 7615 gimple *g; 7616 size_t n[4] = { 0, 0, 0, 0 }; 7617 bool unused[4]; 7618 tree counts[4] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE }; 7619 tree last_iter = NULL_TREE, last_count = NULL_TREE; 7620 size_t i, j; 7621 location_t first_loc = UNKNOWN_LOCATION; 7622 7623 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c)) 7624 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND) 7625 { 7626 switch (OMP_CLAUSE_DEPEND_KIND (c)) 7627 { 7628 case OMP_CLAUSE_DEPEND_IN: 7629 i = 2; 7630 break; 7631 case OMP_CLAUSE_DEPEND_OUT: 7632 case OMP_CLAUSE_DEPEND_INOUT: 7633 i = 0; 7634 break; 7635 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET: 7636 i = 1; 7637 break; 7638 case OMP_CLAUSE_DEPEND_DEPOBJ: 7639 i = 3; 7640 break; 7641 case OMP_CLAUSE_DEPEND_SOURCE: 7642 case OMP_CLAUSE_DEPEND_SINK: 7643 continue; 7644 default: 7645 gcc_unreachable (); 7646 } 7647 tree t = OMP_CLAUSE_DECL (c); 7648 if (first_loc == UNKNOWN_LOCATION) 7649 first_loc = OMP_CLAUSE_LOCATION (c); 7650 if (TREE_CODE (t) == TREE_LIST 7651 && TREE_PURPOSE (t) 7652 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC) 7653 { 7654 if (TREE_PURPOSE (t) != last_iter) 7655 { 7656 tree tcnt = size_one_node; 7657 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it)) 7658 { 7659 if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL, 7660 is_gimple_val, fb_rvalue) == GS_ERROR 7661 || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL, 7662 is_gimple_val, fb_rvalue) == GS_ERROR 7663 || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL, 7664 is_gimple_val, fb_rvalue) == GS_ERROR 7665 || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL, 7666 is_gimple_val, fb_rvalue) 7667 == GS_ERROR)) 7668 return 2; 7669 tree var = TREE_VEC_ELT (it, 0); 7670 tree begin = TREE_VEC_ELT (it, 1); 7671 tree end = TREE_VEC_ELT (it, 2); 7672 tree step = TREE_VEC_ELT (it, 3); 7673 tree orig_step = TREE_VEC_ELT (it, 4); 7674 tree type = TREE_TYPE (var); 7675 tree stype = TREE_TYPE (step); 7676 location_t loc = DECL_SOURCE_LOCATION (var); 7677 tree endmbegin; 7678 /* Compute count for this iterator as 7679
orig_step > 0 7680 ? (begin < end ? (end - begin + (step - 1)) / step : 0) 7681 : (begin > end ? (end - begin + (step + 1)) / step : 0) 7682 and compute product of those for the entire depend 7683 clause. */ 7684 if (POINTER_TYPE_P (type)) 7685 endmbegin = fold_build2_loc (loc, POINTER_DIFF_EXPR, 7686 stype, end, begin); 7687 else 7688 endmbegin = fold_build2_loc (loc, MINUS_EXPR, type, 7689 end, begin); 7690 tree stepm1 = fold_build2_loc (loc, MINUS_EXPR, stype, 7691 step, 7692 build_int_cst (stype, 1)); 7693 tree stepp1 = fold_build2_loc (loc, PLUS_EXPR, stype, step, 7694 build_int_cst (stype, 1)); 7695 tree pos = fold_build2_loc (loc, PLUS_EXPR, stype, 7696 unshare_expr (endmbegin), 7697 stepm1); 7698 pos = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype, 7699 pos, step); 7700 tree neg = fold_build2_loc (loc, PLUS_EXPR, stype, 7701 endmbegin, stepp1); 7702 if (TYPE_UNSIGNED (stype)) 7703 { 7704 neg = fold_build1_loc (loc, NEGATE_EXPR, stype, neg); 7705 step = fold_build1_loc (loc, NEGATE_EXPR, stype, step); 7706 } 7707 neg = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype, 7708 neg, step); 7709 step = NULL_TREE; 7710 tree cond = fold_build2_loc (loc, LT_EXPR, 7711 boolean_type_node, 7712 begin, end); 7713 pos = fold_build3_loc (loc, COND_EXPR, stype, cond, pos, 7714 build_int_cst (stype, 0)); 7715 cond = fold_build2_loc (loc, LT_EXPR, boolean_type_node, 7716 end, begin); 7717 neg = fold_build3_loc (loc, COND_EXPR, stype, cond, neg, 7718 build_int_cst (stype, 0)); 7719 tree osteptype = TREE_TYPE (orig_step); 7720 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node, 7721 orig_step, 7722 build_int_cst (osteptype, 0)); 7723 tree cnt = fold_build3_loc (loc, COND_EXPR, stype, 7724 cond, pos, neg); 7725 cnt = fold_convert_loc (loc, sizetype, cnt); 7726 if (gimplify_expr (&cnt, pre_p, NULL, is_gimple_val, 7727 fb_rvalue) == GS_ERROR) 7728 return 2; 7729 tcnt = size_binop_loc (loc, MULT_EXPR, tcnt, cnt); 7730 } 7731 if (gimplify_expr (&tcnt, pre_p, NULL, is_gimple_val, 7732 fb_rvalue) == GS_ERROR) 7733 return 2; 7734 last_iter = TREE_PURPOSE (t); 7735 last_count = tcnt; 7736 } 7737 if (counts[i] == NULL_TREE) 7738 counts[i] = last_count; 7739 else 7740 counts[i] = size_binop_loc (OMP_CLAUSE_LOCATION (c), 7741 PLUS_EXPR, counts[i], last_count); 7742 } 7743 else 7744 n[i]++; 7745 } 7746 for (i = 0; i < 4; i++) 7747 if (counts[i]) 7748 break; 7749 if (i == 4) 7750 return 0; 7751 7752 tree total = size_zero_node; 7753 for (i = 0; i < 4; i++) 7754 { 7755 unused[i] = counts[i] == NULL_TREE && n[i] == 0; 7756 if (counts[i] == NULL_TREE) 7757 counts[i] = size_zero_node; 7758 if (n[i]) 7759 counts[i] = size_binop (PLUS_EXPR, counts[i], size_int (n[i])); 7760 if (gimplify_expr (&counts[i], pre_p, NULL, is_gimple_val, 7761 fb_rvalue) == GS_ERROR) 7762 return 2; 7763 total = size_binop (PLUS_EXPR, total, counts[i]); 7764 } 7765 7766 if (gimplify_expr (&total, pre_p, NULL, is_gimple_val, fb_rvalue) 7767 == GS_ERROR) 7768 return 2; 7769 bool is_old = unused[1] && unused[3]; 7770 tree totalpx = size_binop (PLUS_EXPR, unshare_expr (total), 7771 size_int (is_old ? 
1 : 4)); 7772 tree type = build_array_type (ptr_type_node, build_index_type (totalpx)); 7773 tree array = create_tmp_var_raw (type); 7774 TREE_ADDRESSABLE (array) = 1; 7775 if (TREE_CODE (totalpx) != INTEGER_CST) 7776 { 7777 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (array))) 7778 gimplify_type_sizes (TREE_TYPE (array), pre_p); 7779 if (gimplify_omp_ctxp) 7780 { 7781 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; 7782 while (ctx 7783 && (ctx->region_type == ORT_WORKSHARE 7784 || ctx->region_type == ORT_TASKGROUP 7785 || ctx->region_type == ORT_SIMD 7786 || ctx->region_type == ORT_ACC)) 7787 ctx = ctx->outer_context; 7788 if (ctx) 7789 omp_add_variable (ctx, array, GOVD_LOCAL | GOVD_SEEN); 7790 } 7791 gimplify_vla_decl (array, pre_p); 7792 } 7793 else 7794 gimple_add_tmp_var (array); 7795 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE, 7796 NULL_TREE); 7797 tree tem; 7798 if (!is_old) 7799 { 7800 tem = build2 (MODIFY_EXPR, void_type_node, r, 7801 build_int_cst (ptr_type_node, 0)); 7802 gimplify_and_add (tem, pre_p); 7803 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE, 7804 NULL_TREE); 7805 } 7806 tem = build2 (MODIFY_EXPR, void_type_node, r, 7807 fold_convert (ptr_type_node, total)); 7808 gimplify_and_add (tem, pre_p); 7809 for (i = 1; i < (is_old ? 2 : 4); i++) 7810 { 7811 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (i + !is_old), 7812 NULL_TREE, NULL_TREE); 7813 tem = build2 (MODIFY_EXPR, void_type_node, r, counts[i - 1]); 7814 gimplify_and_add (tem, pre_p); 7815 } 7816 7817 tree cnts[4]; 7818 for (j = 4; j; j--) 7819 if (!unused[j - 1]) 7820 break; 7821 for (i = 0; i < 4; i++) 7822 { 7823 if (i && (i >= j || unused[i - 1])) 7824 { 7825 cnts[i] = cnts[i - 1]; 7826 continue; 7827 } 7828 cnts[i] = create_tmp_var (sizetype); 7829 if (i == 0) 7830 g = gimple_build_assign (cnts[i], size_int (is_old ? 
2 : 5)); 7831 else 7832 { 7833 tree t; 7834 if (is_old) 7835 t = size_binop (PLUS_EXPR, counts[0], size_int (2)); 7836 else 7837 t = size_binop (PLUS_EXPR, cnts[i - 1], counts[i - 1]); 7838 if (gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue) 7839 == GS_ERROR) 7840 return 2; 7841 g = gimple_build_assign (cnts[i], t); 7842 } 7843 gimple_seq_add_stmt (pre_p, g); 7844 } 7845 7846 last_iter = NULL_TREE; 7847 tree last_bind = NULL_TREE; 7848 tree *last_body = NULL; 7849 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c)) 7850 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND) 7851 { 7852 switch (OMP_CLAUSE_DEPEND_KIND (c)) 7853 { 7854 case OMP_CLAUSE_DEPEND_IN: 7855 i = 2; 7856 break; 7857 case OMP_CLAUSE_DEPEND_OUT: 7858 case OMP_CLAUSE_DEPEND_INOUT: 7859 i = 0; 7860 break; 7861 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET: 7862 i = 1; 7863 break; 7864 case OMP_CLAUSE_DEPEND_DEPOBJ: 7865 i = 3; 7866 break; 7867 case OMP_CLAUSE_DEPEND_SOURCE: 7868 case OMP_CLAUSE_DEPEND_SINK: 7869 continue; 7870 default: 7871 gcc_unreachable (); 7872 } 7873 tree t = OMP_CLAUSE_DECL (c); 7874 if (TREE_CODE (t) == TREE_LIST 7875 && TREE_PURPOSE (t) 7876 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC) 7877 { 7878 if (TREE_PURPOSE (t) != last_iter) 7879 { 7880 if (last_bind) 7881 gimplify_and_add (last_bind, pre_p); 7882 tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5); 7883 last_bind = build3 (BIND_EXPR, void_type_node, 7884 BLOCK_VARS (block), NULL, block); 7885 TREE_SIDE_EFFECTS (last_bind) = 1; 7886 SET_EXPR_LOCATION (last_bind, OMP_CLAUSE_LOCATION (c)); 7887 tree *p = &BIND_EXPR_BODY (last_bind); 7888 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it)) 7889 { 7890 tree var = TREE_VEC_ELT (it, 0); 7891 tree begin = TREE_VEC_ELT (it, 1); 7892 tree end = TREE_VEC_ELT (it, 2); 7893 tree step = TREE_VEC_ELT (it, 3); 7894 tree orig_step = TREE_VEC_ELT (it, 4); 7895 tree type = TREE_TYPE (var); 7896 location_t loc = DECL_SOURCE_LOCATION (var); 7897 /* Emit: 7898 var = begin; 7899 goto cond_label; 7900 beg_label: 7901 ... 7902 var = var + step; 7903 cond_label: 7904 if (orig_step > 0) { 7905 if (var < end) goto beg_label; 7906 } else { 7907 if (var > end) goto beg_label; 7908 } 7909 for each iterator, with inner iterators added to 7910 the ... above. 
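   As an illustrative (hypothetical) case, for
   depend(iterator(i = 0 : n), in : a[i]) this emits a loop whose body
   stores the address of a[i] into the depend array slot indexed by
   cnts[2] and bumps that counter, one element per iteration.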
*/ 7911 tree beg_label = create_artificial_label (loc); 7912 tree cond_label = NULL_TREE; 7913 tem = build2_loc (loc, MODIFY_EXPR, void_type_node, 7914 var, begin); 7915 append_to_statement_list_force (tem, p); 7916 tem = build_and_jump (&cond_label); 7917 append_to_statement_list_force (tem, p); 7918 tem = build1 (LABEL_EXPR, void_type_node, beg_label); 7919 append_to_statement_list (tem, p); 7920 tree bind = build3 (BIND_EXPR, void_type_node, NULL_TREE, 7921 NULL_TREE, NULL_TREE); 7922 TREE_SIDE_EFFECTS (bind) = 1; 7923 SET_EXPR_LOCATION (bind, loc); 7924 append_to_statement_list_force (bind, p); 7925 if (POINTER_TYPE_P (type)) 7926 tem = build2_loc (loc, POINTER_PLUS_EXPR, type, 7927 var, fold_convert_loc (loc, sizetype, 7928 step)); 7929 else 7930 tem = build2_loc (loc, PLUS_EXPR, type, var, step); 7931 tem = build2_loc (loc, MODIFY_EXPR, void_type_node, 7932 var, tem); 7933 append_to_statement_list_force (tem, p); 7934 tem = build1 (LABEL_EXPR, void_type_node, cond_label); 7935 append_to_statement_list (tem, p); 7936 tree cond = fold_build2_loc (loc, LT_EXPR, 7937 boolean_type_node, 7938 var, end); 7939 tree pos 7940 = fold_build3_loc (loc, COND_EXPR, void_type_node, 7941 cond, build_and_jump (&beg_label), 7942 void_node); 7943 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node, 7944 var, end); 7945 tree neg 7946 = fold_build3_loc (loc, COND_EXPR, void_type_node, 7947 cond, build_and_jump (&beg_label), 7948 void_node); 7949 tree osteptype = TREE_TYPE (orig_step); 7950 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node, 7951 orig_step, 7952 build_int_cst (osteptype, 0)); 7953 tem = fold_build3_loc (loc, COND_EXPR, void_type_node, 7954 cond, pos, neg); 7955 append_to_statement_list_force (tem, p); 7956 p = &BIND_EXPR_BODY (bind); 7957 } 7958 last_body = p; 7959 } 7960 last_iter = TREE_PURPOSE (t); 7961 if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR) 7962 { 7963 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t), 7964 0), last_body); 7965 TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1); 7966 } 7967 if (error_operand_p (TREE_VALUE (t))) 7968 return 2; 7969 TREE_VALUE (t) = build_fold_addr_expr (TREE_VALUE (t)); 7970 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i], 7971 NULL_TREE, NULL_TREE); 7972 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR, 7973 void_type_node, r, TREE_VALUE (t)); 7974 append_to_statement_list_force (tem, last_body); 7975 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR, 7976 void_type_node, cnts[i], 7977 size_binop (PLUS_EXPR, cnts[i], size_int (1))); 7978 append_to_statement_list_force (tem, last_body); 7979 TREE_VALUE (t) = null_pointer_node; 7980 } 7981 else 7982 { 7983 if (last_bind) 7984 { 7985 gimplify_and_add (last_bind, pre_p); 7986 last_bind = NULL_TREE; 7987 } 7988 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR) 7989 { 7990 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p, 7991 NULL, is_gimple_val, fb_rvalue); 7992 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1); 7993 } 7994 if (error_operand_p (OMP_CLAUSE_DECL (c))) 7995 return 2; 7996 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c)); 7997 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL, 7998 is_gimple_val, fb_rvalue) == GS_ERROR) 7999 return 2; 8000 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i], 8001 NULL_TREE, NULL_TREE); 8002 tem = build2 (MODIFY_EXPR, void_type_node, r, OMP_CLAUSE_DECL (c)); 8003 gimplify_and_add (tem, pre_p); 8004 g = gimple_build_assign (cnts[i], size_binop (PLUS_EXPR, cnts[i], 8005 size_int 
(1))); 8006 gimple_seq_add_stmt (pre_p, g); 8007 } 8008 } 8009 if (last_bind) 8010 gimplify_and_add (last_bind, pre_p); 8011 tree cond = boolean_false_node; 8012 if (is_old) 8013 { 8014 if (!unused[0]) 8015 cond = build2_loc (first_loc, NE_EXPR, boolean_type_node, cnts[0], 8016 size_binop_loc (first_loc, PLUS_EXPR, counts[0], 8017 size_int (2))); 8018 if (!unused[2]) 8019 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond, 8020 build2_loc (first_loc, NE_EXPR, boolean_type_node, 8021 cnts[2], 8022 size_binop_loc (first_loc, PLUS_EXPR, 8023 totalpx, 8024 size_int (1)))); 8025 } 8026 else 8027 { 8028 tree prev = size_int (5); 8029 for (i = 0; i < 4; i++) 8030 { 8031 if (unused[i]) 8032 continue; 8033 prev = size_binop_loc (first_loc, PLUS_EXPR, counts[i], prev); 8034 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond, 8035 build2_loc (first_loc, NE_EXPR, boolean_type_node, 8036 cnts[i], unshare_expr (prev))); 8037 } 8038 } 8039 tem = build3_loc (first_loc, COND_EXPR, void_type_node, cond, 8040 build_call_expr_loc (first_loc, 8041 builtin_decl_explicit (BUILT_IN_TRAP), 8042 0), void_node); 8043 gimplify_and_add (tem, pre_p); 8044 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND); 8045 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST; 8046 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array); 8047 OMP_CLAUSE_CHAIN (c) = *list_p; 8048 *list_p = c; 8049 return 1; 8050 } 8051 8052 /* Scan the OMP clauses in *LIST_P, installing mappings into a new 8053 and previous omp contexts. */ 8054 8055 static void 8056 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p, 8057 enum omp_region_type region_type, 8058 enum tree_code code) 8059 { 8060 struct gimplify_omp_ctx *ctx, *outer_ctx; 8061 tree c; 8062 hash_map<tree, tree> *struct_map_to_clause = NULL; 8063 tree *prev_list_p = NULL; 8064 int handled_depend_iterators = -1; 8065 int nowait = -1; 8066 8067 ctx = new_omp_context (region_type); 8068 outer_ctx = ctx->outer_context; 8069 if (code == OMP_TARGET) 8070 { 8071 if (!lang_GNU_Fortran ()) 8072 ctx->defaultmap[GDMK_POINTER] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY; 8073 ctx->defaultmap[GDMK_SCALAR] = GOVD_FIRSTPRIVATE; 8074 } 8075 if (!lang_GNU_Fortran ()) 8076 switch (code) 8077 { 8078 case OMP_TARGET: 8079 case OMP_TARGET_DATA: 8080 case OMP_TARGET_ENTER_DATA: 8081 case OMP_TARGET_EXIT_DATA: 8082 case OACC_DECLARE: 8083 case OACC_HOST_DATA: 8084 case OACC_PARALLEL: 8085 case OACC_KERNELS: 8086 ctx->target_firstprivatize_array_bases = true; 8087 default: 8088 break; 8089 } 8090 8091 while ((c = *list_p) != NULL) 8092 { 8093 bool remove = false; 8094 bool notice_outer = true; 8095 const char *check_non_private = NULL; 8096 unsigned int flags; 8097 tree decl; 8098 8099 switch (OMP_CLAUSE_CODE (c)) 8100 { 8101 case OMP_CLAUSE_PRIVATE: 8102 flags = GOVD_PRIVATE | GOVD_EXPLICIT; 8103 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c))) 8104 { 8105 flags |= GOVD_PRIVATE_OUTER_REF; 8106 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1; 8107 } 8108 else 8109 notice_outer = false; 8110 goto do_add; 8111 case OMP_CLAUSE_SHARED: 8112 flags = GOVD_SHARED | GOVD_EXPLICIT; 8113 goto do_add; 8114 case OMP_CLAUSE_FIRSTPRIVATE: 8115 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT; 8116 check_non_private = "firstprivate"; 8117 goto do_add; 8118 case OMP_CLAUSE_LASTPRIVATE: 8119 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)) 8120 switch (code) 8121 { 8122 case OMP_DISTRIBUTE: 8123 error_at (OMP_CLAUSE_LOCATION (c), 8124 "conditional %<lastprivate%> clause on " 8125 "%<distribute%> 
construct"); 8126 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0; 8127 break; 8128 case OMP_TASKLOOP: 8129 error_at (OMP_CLAUSE_LOCATION (c), 8130 "conditional %<lastprivate%> clause on " 8131 "%<taskloop%> construct"); 8132 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0; 8133 break; 8134 default: 8135 break; 8136 } 8137 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT; 8138 check_non_private = "lastprivate"; 8139 decl = OMP_CLAUSE_DECL (c); 8140 if (error_operand_p (decl)) 8141 goto do_add; 8142 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) 8143 && !lang_hooks.decls.omp_scalar_p (decl)) 8144 { 8145 error_at (OMP_CLAUSE_LOCATION (c), 8146 "non-scalar variable %qD in conditional " 8147 "%<lastprivate%> clause", decl); 8148 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0; 8149 } 8150 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)) 8151 sorry_at (OMP_CLAUSE_LOCATION (c), 8152 "%<conditional%> modifier on %<lastprivate%> clause " 8153 "not supported yet"); 8154 if (outer_ctx 8155 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL 8156 || ((outer_ctx->region_type & ORT_COMBINED_TEAMS) 8157 == ORT_COMBINED_TEAMS)) 8158 && splay_tree_lookup (outer_ctx->variables, 8159 (splay_tree_key) decl) == NULL) 8160 { 8161 omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN); 8162 if (outer_ctx->outer_context) 8163 omp_notice_variable (outer_ctx->outer_context, decl, true); 8164 } 8165 else if (outer_ctx 8166 && (outer_ctx->region_type & ORT_TASK) != 0 8167 && outer_ctx->combined_loop 8168 && splay_tree_lookup (outer_ctx->variables, 8169 (splay_tree_key) decl) == NULL) 8170 { 8171 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN); 8172 if (outer_ctx->outer_context) 8173 omp_notice_variable (outer_ctx->outer_context, decl, true); 8174 } 8175 else if (outer_ctx 8176 && (outer_ctx->region_type == ORT_WORKSHARE 8177 || outer_ctx->region_type == ORT_ACC) 8178 && outer_ctx->combined_loop 8179 && splay_tree_lookup (outer_ctx->variables, 8180 (splay_tree_key) decl) == NULL 8181 && !omp_check_private (outer_ctx, decl, false)) 8182 { 8183 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN); 8184 if (outer_ctx->outer_context 8185 && (outer_ctx->outer_context->region_type 8186 == ORT_COMBINED_PARALLEL) 8187 && splay_tree_lookup (outer_ctx->outer_context->variables, 8188 (splay_tree_key) decl) == NULL) 8189 { 8190 struct gimplify_omp_ctx *octx = outer_ctx->outer_context; 8191 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN); 8192 if (octx->outer_context) 8193 { 8194 octx = octx->outer_context; 8195 if (octx->region_type == ORT_WORKSHARE 8196 && octx->combined_loop 8197 && splay_tree_lookup (octx->variables, 8198 (splay_tree_key) decl) == NULL 8199 && !omp_check_private (octx, decl, false)) 8200 { 8201 omp_add_variable (octx, decl, 8202 GOVD_LASTPRIVATE | GOVD_SEEN); 8203 octx = octx->outer_context; 8204 if (octx 8205 && ((octx->region_type & ORT_COMBINED_TEAMS) 8206 == ORT_COMBINED_TEAMS) 8207 && (splay_tree_lookup (octx->variables, 8208 (splay_tree_key) decl) 8209 == NULL)) 8210 { 8211 omp_add_variable (octx, decl, 8212 GOVD_SHARED | GOVD_SEEN); 8213 octx = octx->outer_context; 8214 } 8215 } 8216 if (octx) 8217 omp_notice_variable (octx, decl, true); 8218 } 8219 } 8220 else if (outer_ctx->outer_context) 8221 omp_notice_variable (outer_ctx->outer_context, decl, true); 8222 } 8223 goto do_add; 8224 case OMP_CLAUSE_REDUCTION: 8225 if (OMP_CLAUSE_REDUCTION_TASK (c)) 8226 { 8227 if (region_type == ORT_WORKSHARE) 8228 { 8229 if (nowait == -1) 8230 nowait = omp_find_clause (*list_p, 8231 
OMP_CLAUSE_NOWAIT) != NULL_TREE; 8232 if (nowait 8233 && (outer_ctx == NULL 8234 || outer_ctx->region_type != ORT_COMBINED_PARALLEL)) 8235 { 8236 error_at (OMP_CLAUSE_LOCATION (c), 8237 "%<task%> reduction modifier on a construct " 8238 "with a %<nowait%> clause"); 8239 OMP_CLAUSE_REDUCTION_TASK (c) = 0; 8240 } 8241 } 8242 else if ((region_type & ORT_PARALLEL) != ORT_PARALLEL) 8243 { 8244 error_at (OMP_CLAUSE_LOCATION (c), 8245 "invalid %<task%> reduction modifier on construct " 8246 "other than %<parallel%>, %<for%> or %<sections%>"); 8247 OMP_CLAUSE_REDUCTION_TASK (c) = 0; 8248 } 8249 } 8250 /* FALLTHRU */ 8251 case OMP_CLAUSE_IN_REDUCTION: 8252 case OMP_CLAUSE_TASK_REDUCTION: 8253 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT; 8254 /* OpenACC permits reductions on private variables. */ 8255 if (!(region_type & ORT_ACC) 8256 /* taskgroup is actually not a worksharing region. */ 8257 && code != OMP_TASKGROUP) 8258 check_non_private = omp_clause_code_name[OMP_CLAUSE_CODE (c)]; 8259 decl = OMP_CLAUSE_DECL (c); 8260 if (TREE_CODE (decl) == MEM_REF) 8261 { 8262 tree type = TREE_TYPE (decl); 8263 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p, 8264 NULL, is_gimple_val, fb_rvalue, false) 8265 == GS_ERROR) 8266 { 8267 remove = true; 8268 break; 8269 } 8270 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type)); 8271 if (DECL_P (v)) 8272 { 8273 omp_firstprivatize_variable (ctx, v); 8274 omp_notice_variable (ctx, v, true); 8275 } 8276 decl = TREE_OPERAND (decl, 0); 8277 if (TREE_CODE (decl) == POINTER_PLUS_EXPR) 8278 { 8279 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p, 8280 NULL, is_gimple_val, fb_rvalue, false) 8281 == GS_ERROR) 8282 { 8283 remove = true; 8284 break; 8285 } 8286 v = TREE_OPERAND (decl, 1); 8287 if (DECL_P (v)) 8288 { 8289 omp_firstprivatize_variable (ctx, v); 8290 omp_notice_variable (ctx, v, true); 8291 } 8292 decl = TREE_OPERAND (decl, 0); 8293 } 8294 if (TREE_CODE (decl) == ADDR_EXPR 8295 || TREE_CODE (decl) == INDIRECT_REF) 8296 decl = TREE_OPERAND (decl, 0); 8297 } 8298 goto do_add_decl; 8299 case OMP_CLAUSE_LINEAR: 8300 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL, 8301 is_gimple_val, fb_rvalue) == GS_ERROR) 8302 { 8303 remove = true; 8304 break; 8305 } 8306 else 8307 { 8308 if (code == OMP_SIMD 8309 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c)) 8310 { 8311 struct gimplify_omp_ctx *octx = outer_ctx; 8312 if (octx 8313 && octx->region_type == ORT_WORKSHARE 8314 && octx->combined_loop 8315 && !octx->distribute) 8316 { 8317 if (octx->outer_context 8318 && (octx->outer_context->region_type 8319 == ORT_COMBINED_PARALLEL)) 8320 octx = octx->outer_context->outer_context; 8321 else 8322 octx = octx->outer_context; 8323 } 8324 if (octx 8325 && octx->region_type == ORT_WORKSHARE 8326 && octx->combined_loop 8327 && octx->distribute) 8328 { 8329 error_at (OMP_CLAUSE_LOCATION (c), 8330 "%<linear%> clause for variable other than " 8331 "loop iterator specified on construct " 8332 "combined with %<distribute%>"); 8333 remove = true; 8334 break; 8335 } 8336 } 8337 /* For combined #pragma omp parallel for simd, need to put 8338 lastprivate and perhaps firstprivate too on the 8339 parallel. Similarly for #pragma omp for simd. 
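   E.g. a (hypothetical) combined construct

     #pragma omp parallel for simd linear (x)

   adds X to the enclosing parallel context with GOVD_LASTPRIVATE, and
   with GOVD_FIRSTPRIVATE as well unless copy-in is suppressed, via the
   outer-context walk below.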
*/ 8340 struct gimplify_omp_ctx *octx = outer_ctx; 8341 decl = NULL_TREE; 8342 do 8343 { 8344 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c) 8345 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c)) 8346 break; 8347 decl = OMP_CLAUSE_DECL (c); 8348 if (error_operand_p (decl)) 8349 { 8350 decl = NULL_TREE; 8351 break; 8352 } 8353 flags = GOVD_SEEN; 8354 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)) 8355 flags |= GOVD_FIRSTPRIVATE; 8356 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c)) 8357 flags |= GOVD_LASTPRIVATE; 8358 if (octx 8359 && octx->region_type == ORT_WORKSHARE 8360 && octx->combined_loop) 8361 { 8362 if (octx->outer_context 8363 && (octx->outer_context->region_type 8364 == ORT_COMBINED_PARALLEL)) 8365 octx = octx->outer_context; 8366 else if (omp_check_private (octx, decl, false)) 8367 break; 8368 } 8369 else if (octx 8370 && (octx->region_type & ORT_TASK) != 0 8371 && octx->combined_loop) 8372 ; 8373 else if (octx 8374 && octx->region_type == ORT_COMBINED_PARALLEL 8375 && ctx->region_type == ORT_WORKSHARE 8376 && octx == outer_ctx) 8377 flags = GOVD_SEEN | GOVD_SHARED; 8378 else if (octx 8379 && ((octx->region_type & ORT_COMBINED_TEAMS) 8380 == ORT_COMBINED_TEAMS)) 8381 flags = GOVD_SEEN | GOVD_SHARED; 8382 else if (octx 8383 && octx->region_type == ORT_COMBINED_TARGET) 8384 { 8385 flags &= ~GOVD_LASTPRIVATE; 8386 if (flags == GOVD_SEEN) 8387 break; 8388 } 8389 else 8390 break; 8391 splay_tree_node on 8392 = splay_tree_lookup (octx->variables, 8393 (splay_tree_key) decl); 8394 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0) 8395 { 8396 octx = NULL; 8397 break; 8398 } 8399 omp_add_variable (octx, decl, flags); 8400 if (octx->outer_context == NULL) 8401 break; 8402 octx = octx->outer_context; 8403 } 8404 while (1); 8405 if (octx 8406 && decl 8407 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c) 8408 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))) 8409 omp_notice_variable (octx, decl, true); 8410 } 8411 flags = GOVD_LINEAR | GOVD_EXPLICIT; 8412 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c) 8413 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c)) 8414 { 8415 notice_outer = false; 8416 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER; 8417 } 8418 goto do_add; 8419 8420 case OMP_CLAUSE_MAP: 8421 decl = OMP_CLAUSE_DECL (c); 8422 if (error_operand_p (decl)) 8423 remove = true; 8424 switch (code) 8425 { 8426 case OMP_TARGET: 8427 break; 8428 case OACC_DATA: 8429 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE) 8430 break; 8431 /* FALLTHRU */ 8432 case OMP_TARGET_DATA: 8433 case OMP_TARGET_ENTER_DATA: 8434 case OMP_TARGET_EXIT_DATA: 8435 case OACC_ENTER_DATA: 8436 case OACC_EXIT_DATA: 8437 case OACC_HOST_DATA: 8438 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER 8439 || (OMP_CLAUSE_MAP_KIND (c) 8440 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)) 8441 /* For target {,enter ,exit }data only the array slice is 8442 mapped, but not the pointer to it. */ 8443 remove = true; 8444 break; 8445 default: 8446 break; 8447 } 8448 if (remove) 8449 break; 8450 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC)) 8451 { 8452 struct gimplify_omp_ctx *octx; 8453 for (octx = outer_ctx; octx; octx = octx->outer_context) 8454 { 8455 if (octx->region_type != ORT_ACC_HOST_DATA) 8456 break; 8457 splay_tree_node n2 8458 = splay_tree_lookup (octx->variables, 8459 (splay_tree_key) decl); 8460 if (n2) 8461 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE " 8462 "declared in enclosing %<host_data%> region", 8463 DECL_NAME (decl)); 8464 } 8465 } 8466 if (OMP_CLAUSE_SIZE (c) == NULL_TREE) 8467 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? 
DECL_SIZE_UNIT (decl) 8468 : TYPE_SIZE_UNIT (TREE_TYPE (decl)); 8469 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p, 8470 NULL, is_gimple_val, fb_rvalue) == GS_ERROR) 8471 { 8472 remove = true; 8473 break; 8474 } 8475 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER 8476 || (OMP_CLAUSE_MAP_KIND (c) 8477 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)) 8478 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST) 8479 { 8480 OMP_CLAUSE_SIZE (c) 8481 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL, 8482 false); 8483 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c), 8484 GOVD_FIRSTPRIVATE | GOVD_SEEN); 8485 } 8486 if (!DECL_P (decl)) 8487 { 8488 tree d = decl, *pd; 8489 if (TREE_CODE (d) == ARRAY_REF) 8490 { 8491 while (TREE_CODE (d) == ARRAY_REF) 8492 d = TREE_OPERAND (d, 0); 8493 if (TREE_CODE (d) == COMPONENT_REF 8494 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE) 8495 decl = d; 8496 } 8497 pd = &OMP_CLAUSE_DECL (c); 8498 if (d == decl 8499 && TREE_CODE (decl) == INDIRECT_REF 8500 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF 8501 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0))) 8502 == REFERENCE_TYPE)) 8503 { 8504 pd = &TREE_OPERAND (decl, 0); 8505 decl = TREE_OPERAND (decl, 0); 8506 } 8507 if (TREE_CODE (decl) == COMPONENT_REF) 8508 { 8509 while (TREE_CODE (decl) == COMPONENT_REF) 8510 decl = TREE_OPERAND (decl, 0); 8511 if (TREE_CODE (decl) == INDIRECT_REF 8512 && DECL_P (TREE_OPERAND (decl, 0)) 8513 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0))) 8514 == REFERENCE_TYPE)) 8515 decl = TREE_OPERAND (decl, 0); 8516 } 8517 if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue, fb_lvalue) 8518 == GS_ERROR) 8519 { 8520 remove = true; 8521 break; 8522 } 8523 if (DECL_P (decl)) 8524 { 8525 if (error_operand_p (decl)) 8526 { 8527 remove = true; 8528 break; 8529 } 8530 8531 tree stype = TREE_TYPE (decl); 8532 if (TREE_CODE (stype) == REFERENCE_TYPE) 8533 stype = TREE_TYPE (stype); 8534 if (TYPE_SIZE_UNIT (stype) == NULL 8535 || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST) 8536 { 8537 error_at (OMP_CLAUSE_LOCATION (c), 8538 "mapping field %qE of variable length " 8539 "structure", OMP_CLAUSE_DECL (c)); 8540 remove = true; 8541 break; 8542 } 8543 8544 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER) 8545 { 8546 /* Error recovery. 
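   A GOMP_MAP_ALWAYS_POINTER clause is expected to follow, at a
   distance of at most two clauses, the map clause it augments; if
   earlier errors left no such predecessor recorded in PREV_LIST_P,
   just drop the clause.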
*/ 8547 if (prev_list_p == NULL) 8548 { 8549 remove = true; 8550 break; 8551 } 8552 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c) 8553 { 8554 tree ch = OMP_CLAUSE_CHAIN (*prev_list_p); 8555 if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c) 8556 { 8557 remove = true; 8558 break; 8559 } 8560 } 8561 } 8562 8563 tree offset; 8564 poly_int64 bitsize, bitpos; 8565 machine_mode mode; 8566 int unsignedp, reversep, volatilep = 0; 8567 tree base = OMP_CLAUSE_DECL (c); 8568 while (TREE_CODE (base) == ARRAY_REF) 8569 base = TREE_OPERAND (base, 0); 8570 if (TREE_CODE (base) == INDIRECT_REF) 8571 base = TREE_OPERAND (base, 0); 8572 base = get_inner_reference (base, &bitsize, &bitpos, &offset, 8573 &mode, &unsignedp, &reversep, 8574 &volatilep); 8575 tree orig_base = base; 8576 if ((TREE_CODE (base) == INDIRECT_REF 8577 || (TREE_CODE (base) == MEM_REF 8578 && integer_zerop (TREE_OPERAND (base, 1)))) 8579 && DECL_P (TREE_OPERAND (base, 0)) 8580 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0))) 8581 == REFERENCE_TYPE)) 8582 base = TREE_OPERAND (base, 0); 8583 gcc_assert (base == decl 8584 && (offset == NULL_TREE 8585 || poly_int_tree_p (offset))); 8586 8587 splay_tree_node n 8588 = splay_tree_lookup (ctx->variables, (splay_tree_key)decl); 8589 bool ptr = (OMP_CLAUSE_MAP_KIND (c) 8590 == GOMP_MAP_ALWAYS_POINTER); 8591 if (n == NULL || (n->value & GOVD_MAP) == 0) 8592 { 8593 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c), 8594 OMP_CLAUSE_MAP); 8595 OMP_CLAUSE_SET_MAP_KIND (l, GOMP_MAP_STRUCT); 8596 if (orig_base != base) 8597 OMP_CLAUSE_DECL (l) = unshare_expr (orig_base); 8598 else 8599 OMP_CLAUSE_DECL (l) = decl; 8600 OMP_CLAUSE_SIZE (l) = size_int (1); 8601 if (struct_map_to_clause == NULL) 8602 struct_map_to_clause = new hash_map<tree, tree>; 8603 struct_map_to_clause->put (decl, l); 8604 if (ptr) 8605 { 8606 enum gomp_map_kind mkind 8607 = code == OMP_TARGET_EXIT_DATA 8608 ? 
GOMP_MAP_RELEASE : GOMP_MAP_ALLOC; 8609 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c), 8610 OMP_CLAUSE_MAP); 8611 OMP_CLAUSE_SET_MAP_KIND (c2, mkind); 8612 OMP_CLAUSE_DECL (c2) 8613 = unshare_expr (OMP_CLAUSE_DECL (c)); 8614 OMP_CLAUSE_CHAIN (c2) = *prev_list_p; 8615 OMP_CLAUSE_SIZE (c2) 8616 = TYPE_SIZE_UNIT (ptr_type_node); 8617 OMP_CLAUSE_CHAIN (l) = c2; 8618 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c) 8619 { 8620 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p); 8621 tree c3 8622 = build_omp_clause (OMP_CLAUSE_LOCATION (c), 8623 OMP_CLAUSE_MAP); 8624 OMP_CLAUSE_SET_MAP_KIND (c3, mkind); 8625 OMP_CLAUSE_DECL (c3) 8626 = unshare_expr (OMP_CLAUSE_DECL (c4)); 8627 OMP_CLAUSE_SIZE (c3) 8628 = TYPE_SIZE_UNIT (ptr_type_node); 8629 OMP_CLAUSE_CHAIN (c3) = *prev_list_p; 8630 OMP_CLAUSE_CHAIN (c2) = c3; 8631 } 8632 *prev_list_p = l; 8633 prev_list_p = NULL; 8634 } 8635 else 8636 { 8637 OMP_CLAUSE_CHAIN (l) = c; 8638 *list_p = l; 8639 list_p = &OMP_CLAUSE_CHAIN (l); 8640 } 8641 if (orig_base != base && code == OMP_TARGET) 8642 { 8643 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c), 8644 OMP_CLAUSE_MAP); 8645 enum gomp_map_kind mkind 8646 = GOMP_MAP_FIRSTPRIVATE_REFERENCE; 8647 OMP_CLAUSE_SET_MAP_KIND (c2, mkind); 8648 OMP_CLAUSE_DECL (c2) = decl; 8649 OMP_CLAUSE_SIZE (c2) = size_zero_node; 8650 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l); 8651 OMP_CLAUSE_CHAIN (l) = c2; 8652 } 8653 flags = GOVD_MAP | GOVD_EXPLICIT; 8654 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr) 8655 flags |= GOVD_SEEN; 8656 goto do_add_decl; 8657 } 8658 else 8659 { 8660 tree *osc = struct_map_to_clause->get (decl); 8661 tree *sc = NULL, *scp = NULL; 8662 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr) 8663 n->value |= GOVD_SEEN; 8664 poly_offset_int o1, o2; 8665 if (offset) 8666 o1 = wi::to_poly_offset (offset); 8667 else 8668 o1 = 0; 8669 if (maybe_ne (bitpos, 0)) 8670 o1 += bits_to_bytes_round_down (bitpos); 8671 sc = &OMP_CLAUSE_CHAIN (*osc); 8672 if (*sc != c 8673 && (OMP_CLAUSE_MAP_KIND (*sc) 8674 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)) 8675 sc = &OMP_CLAUSE_CHAIN (*sc); 8676 for (; *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc)) 8677 if (ptr && sc == prev_list_p) 8678 break; 8679 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc)) 8680 != COMPONENT_REF 8681 && (TREE_CODE (OMP_CLAUSE_DECL (*sc)) 8682 != INDIRECT_REF) 8683 && (TREE_CODE (OMP_CLAUSE_DECL (*sc)) 8684 != ARRAY_REF)) 8685 break; 8686 else 8687 { 8688 tree offset2; 8689 poly_int64 bitsize2, bitpos2; 8690 base = OMP_CLAUSE_DECL (*sc); 8691 if (TREE_CODE (base) == ARRAY_REF) 8692 { 8693 while (TREE_CODE (base) == ARRAY_REF) 8694 base = TREE_OPERAND (base, 0); 8695 if (TREE_CODE (base) != COMPONENT_REF 8696 || (TREE_CODE (TREE_TYPE (base)) 8697 != ARRAY_TYPE)) 8698 break; 8699 } 8700 else if (TREE_CODE (base) == INDIRECT_REF 8701 && (TREE_CODE (TREE_OPERAND (base, 0)) 8702 == COMPONENT_REF) 8703 && (TREE_CODE (TREE_TYPE 8704 (TREE_OPERAND (base, 0))) 8705 == REFERENCE_TYPE)) 8706 base = TREE_OPERAND (base, 0); 8707 base = get_inner_reference (base, &bitsize2, 8708 &bitpos2, &offset2, 8709 &mode, &unsignedp, 8710 &reversep, &volatilep); 8711 if ((TREE_CODE (base) == INDIRECT_REF 8712 || (TREE_CODE (base) == MEM_REF 8713 && integer_zerop (TREE_OPERAND (base, 8714 1)))) 8715 && DECL_P (TREE_OPERAND (base, 0)) 8716 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 8717 0))) 8718 == REFERENCE_TYPE)) 8719 base = TREE_OPERAND (base, 0); 8720 if (base != decl) 8721 break; 8722 if (scp) 8723 continue; 8724 gcc_assert (offset2 == NULL_TREE 8725 || poly_int_tree_p (offset2)); 
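/* Strip array and indirect references from the access paths of *SC
   and C and peel matching COMPONENT_REFs; if the walk bottoms out at
   the same place, the same member is mapped twice, e.g. a
   (hypothetical) map(to: s.a) map(from: s.a), which is diagnosed
   below.  */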
8726 tree d1 = OMP_CLAUSE_DECL (*sc); 8727 tree d2 = OMP_CLAUSE_DECL (c); 8728 while (TREE_CODE (d1) == ARRAY_REF) 8729 d1 = TREE_OPERAND (d1, 0); 8730 while (TREE_CODE (d2) == ARRAY_REF) 8731 d2 = TREE_OPERAND (d2, 0); 8732 if (TREE_CODE (d1) == INDIRECT_REF) 8733 d1 = TREE_OPERAND (d1, 0); 8734 if (TREE_CODE (d2) == INDIRECT_REF) 8735 d2 = TREE_OPERAND (d2, 0); 8736 while (TREE_CODE (d1) == COMPONENT_REF) 8737 if (TREE_CODE (d2) == COMPONENT_REF 8738 && TREE_OPERAND (d1, 1) 8739 == TREE_OPERAND (d2, 1)) 8740 { 8741 d1 = TREE_OPERAND (d1, 0); 8742 d2 = TREE_OPERAND (d2, 0); 8743 } 8744 else 8745 break; 8746 if (d1 == d2) 8747 { 8748 error_at (OMP_CLAUSE_LOCATION (c), 8749 "%qE appears more than once in map " 8750 "clauses", OMP_CLAUSE_DECL (c)); 8751 remove = true; 8752 break; 8753 } 8754 if (offset2) 8755 o2 = wi::to_poly_offset (offset2); 8756 else 8757 o2 = 0; 8758 o2 += bits_to_bytes_round_down (bitpos2); 8759 if (maybe_lt (o1, o2) 8760 || (known_eq (o1, o2) 8761 && maybe_lt (bitpos, bitpos2))) 8762 { 8763 if (ptr) 8764 scp = sc; 8765 else 8766 break; 8767 } 8768 } 8769 if (remove) 8770 break; 8771 OMP_CLAUSE_SIZE (*osc) 8772 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc), 8773 size_one_node); 8774 if (ptr) 8775 { 8776 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c), 8777 OMP_CLAUSE_MAP); 8778 tree cl = NULL_TREE; 8779 enum gomp_map_kind mkind 8780 = code == OMP_TARGET_EXIT_DATA 8781 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC; 8782 OMP_CLAUSE_SET_MAP_KIND (c2, mkind); 8783 OMP_CLAUSE_DECL (c2) 8784 = unshare_expr (OMP_CLAUSE_DECL (c)); 8785 OMP_CLAUSE_CHAIN (c2) = scp ? *scp : *prev_list_p; 8786 OMP_CLAUSE_SIZE (c2) 8787 = TYPE_SIZE_UNIT (ptr_type_node); 8788 cl = scp ? *prev_list_p : c2; 8789 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c) 8790 { 8791 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p); 8792 tree c3 8793 = build_omp_clause (OMP_CLAUSE_LOCATION (c), 8794 OMP_CLAUSE_MAP); 8795 OMP_CLAUSE_SET_MAP_KIND (c3, mkind); 8796 OMP_CLAUSE_DECL (c3) 8797 = unshare_expr (OMP_CLAUSE_DECL (c4)); 8798 OMP_CLAUSE_SIZE (c3) 8799 = TYPE_SIZE_UNIT (ptr_type_node); 8800 OMP_CLAUSE_CHAIN (c3) = *prev_list_p; 8801 if (!scp) 8802 OMP_CLAUSE_CHAIN (c2) = c3; 8803 else 8804 cl = c3; 8805 } 8806 if (scp) 8807 *scp = c2; 8808 if (sc == prev_list_p) 8809 { 8810 *sc = cl; 8811 prev_list_p = NULL; 8812 } 8813 else 8814 { 8815 *prev_list_p = OMP_CLAUSE_CHAIN (c); 8816 list_p = prev_list_p; 8817 prev_list_p = NULL; 8818 OMP_CLAUSE_CHAIN (c) = *sc; 8819 *sc = cl; 8820 continue; 8821 } 8822 } 8823 else if (*sc != c) 8824 { 8825 *list_p = OMP_CLAUSE_CHAIN (c); 8826 OMP_CLAUSE_CHAIN (c) = *sc; 8827 *sc = c; 8828 continue; 8829 } 8830 } 8831 } 8832 if (!remove 8833 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER 8834 && OMP_CLAUSE_CHAIN (c) 8835 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP 8836 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c)) 8837 == GOMP_MAP_ALWAYS_POINTER)) 8838 prev_list_p = list_p; 8839 break; 8840 } 8841 flags = GOVD_MAP | GOVD_EXPLICIT; 8842 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO 8843 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM) 8844 flags |= GOVD_MAP_ALWAYS_TO; 8845 goto do_add; 8846 8847 case OMP_CLAUSE_DEPEND: 8848 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK) 8849 { 8850 tree deps = OMP_CLAUSE_DECL (c); 8851 while (deps && TREE_CODE (deps) == TREE_LIST) 8852 { 8853 if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR 8854 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1))) 8855 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1), 8856 pre_p, 
NULL, is_gimple_val, fb_rvalue); 8857 deps = TREE_CHAIN (deps); 8858 } 8859 break; 8860 } 8861 else if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE) 8862 break; 8863 if (handled_depend_iterators == -1) 8864 handled_depend_iterators = gimplify_omp_depend (list_p, pre_p); 8865 if (handled_depend_iterators) 8866 { 8867 if (handled_depend_iterators == 2) 8868 remove = true; 8869 break; 8870 } 8871 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR) 8872 { 8873 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p, 8874 NULL, is_gimple_val, fb_rvalue); 8875 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1); 8876 } 8877 if (error_operand_p (OMP_CLAUSE_DECL (c))) 8878 { 8879 remove = true; 8880 break; 8881 } 8882 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c)); 8883 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL, 8884 is_gimple_val, fb_rvalue) == GS_ERROR) 8885 { 8886 remove = true; 8887 break; 8888 } 8889 break; 8890 8891 case OMP_CLAUSE_TO: 8892 case OMP_CLAUSE_FROM: 8893 case OMP_CLAUSE__CACHE_: 8894 decl = OMP_CLAUSE_DECL (c); 8895 if (error_operand_p (decl)) 8896 { 8897 remove = true; 8898 break; 8899 } 8900 if (OMP_CLAUSE_SIZE (c) == NULL_TREE) 8901 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl) 8902 : TYPE_SIZE_UNIT (TREE_TYPE (decl)); 8903 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p, 8904 NULL, is_gimple_val, fb_rvalue) == GS_ERROR) 8905 { 8906 remove = true; 8907 break; 8908 } 8909 if (!DECL_P (decl)) 8910 { 8911 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, 8912 NULL, is_gimple_lvalue, fb_lvalue) 8913 == GS_ERROR) 8914 { 8915 remove = true; 8916 break; 8917 } 8918 break; 8919 } 8920 goto do_notice; 8921 8922 case OMP_CLAUSE_USE_DEVICE_PTR: 8923 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT; 8924 goto do_add; 8925 case OMP_CLAUSE_IS_DEVICE_PTR: 8926 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT; 8927 goto do_add; 8928 8929 do_add: 8930 decl = OMP_CLAUSE_DECL (c); 8931 do_add_decl: 8932 if (error_operand_p (decl)) 8933 { 8934 remove = true; 8935 break; 8936 } 8937 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0) 8938 { 8939 tree t = omp_member_access_dummy_var (decl); 8940 if (t) 8941 { 8942 tree v = DECL_VALUE_EXPR (decl); 8943 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1)); 8944 if (outer_ctx) 8945 omp_notice_variable (outer_ctx, t, true); 8946 } 8947 } 8948 if (code == OACC_DATA 8949 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP 8950 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER) 8951 flags |= GOVD_MAP_0LEN_ARRAY; 8952 omp_add_variable (ctx, decl, flags); 8953 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION 8954 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION 8955 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION) 8956 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)) 8957 { 8958 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c), 8959 GOVD_LOCAL | GOVD_SEEN); 8960 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) 8961 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c), 8962 find_decl_expr, 8963 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c), 8964 NULL) == NULL_TREE) 8965 omp_add_variable (ctx, 8966 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c), 8967 GOVD_LOCAL | GOVD_SEEN); 8968 gimplify_omp_ctxp = ctx; 8969 push_gimplify_context (); 8970 8971 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL; 8972 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL; 8973 8974 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c), 8975 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)); 8976 pop_gimplify_context 8977 (gimple_seq_first_stmt 
(OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))); 8978 push_gimplify_context (); 8979 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c), 8980 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)); 8981 pop_gimplify_context 8982 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c))); 8983 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE; 8984 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE; 8985 8986 gimplify_omp_ctxp = outer_ctx; 8987 } 8988 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE 8989 && OMP_CLAUSE_LASTPRIVATE_STMT (c)) 8990 { 8991 gimplify_omp_ctxp = ctx; 8992 push_gimplify_context (); 8993 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR) 8994 { 8995 tree bind = build3 (BIND_EXPR, void_type_node, NULL, 8996 NULL, NULL); 8997 TREE_SIDE_EFFECTS (bind) = 1; 8998 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c); 8999 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind; 9000 } 9001 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c), 9002 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)); 9003 pop_gimplify_context 9004 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))); 9005 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE; 9006 9007 gimplify_omp_ctxp = outer_ctx; 9008 } 9009 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR 9010 && OMP_CLAUSE_LINEAR_STMT (c)) 9011 { 9012 gimplify_omp_ctxp = ctx; 9013 push_gimplify_context (); 9014 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR) 9015 { 9016 tree bind = build3 (BIND_EXPR, void_type_node, NULL, 9017 NULL, NULL); 9018 TREE_SIDE_EFFECTS (bind) = 1; 9019 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c); 9020 OMP_CLAUSE_LINEAR_STMT (c) = bind; 9021 } 9022 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c), 9023 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)); 9024 pop_gimplify_context 9025 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))); 9026 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE; 9027 9028 gimplify_omp_ctxp = outer_ctx; 9029 } 9030 if (notice_outer) 9031 goto do_notice; 9032 break; 9033 9034 case OMP_CLAUSE_COPYIN: 9035 case OMP_CLAUSE_COPYPRIVATE: 9036 decl = OMP_CLAUSE_DECL (c); 9037 if (error_operand_p (decl)) 9038 { 9039 remove = true; 9040 break; 9041 } 9042 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE 9043 && !remove 9044 && !omp_check_private (ctx, decl, true)) 9045 { 9046 remove = true; 9047 if (is_global_var (decl)) 9048 { 9049 if (DECL_THREAD_LOCAL_P (decl)) 9050 remove = false; 9051 else if (DECL_HAS_VALUE_EXPR_P (decl)) 9052 { 9053 tree value = get_base_address (DECL_VALUE_EXPR (decl)); 9054 9055 if (value 9056 && DECL_P (value) 9057 && DECL_THREAD_LOCAL_P (value)) 9058 remove = false; 9059 } 9060 } 9061 if (remove) 9062 error_at (OMP_CLAUSE_LOCATION (c), 9063 "copyprivate variable %qE is not threadprivate" 9064 " or private in outer context", DECL_NAME (decl)); 9065 } 9066 do_notice: 9067 if ((region_type & ORT_TASKLOOP) == ORT_TASKLOOP 9068 && outer_ctx 9069 && outer_ctx->region_type == ORT_COMBINED_PARALLEL 9070 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION 9071 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE 9072 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)) 9073 { 9074 splay_tree_node on 9075 = splay_tree_lookup (outer_ctx->variables, 9076 (splay_tree_key)decl); 9077 if (on == NULL || (on->value & GOVD_DATA_SHARE_CLASS) == 0) 9078 { 9079 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION 9080 && TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF 9081 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE 9082 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE 9083 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) 9084 == POINTER_TYPE)))) 9085 
omp_firstprivatize_variable (outer_ctx, decl); 9086 else 9087 { 9088 omp_add_variable (outer_ctx, decl, 9089 GOVD_SEEN | GOVD_SHARED); 9090 if (outer_ctx->outer_context) 9091 omp_notice_variable (outer_ctx->outer_context, decl, 9092 true); 9093 } 9094 } 9095 } 9096 if (outer_ctx) 9097 omp_notice_variable (outer_ctx, decl, true); 9098 if (check_non_private 9099 && region_type == ORT_WORKSHARE 9100 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION 9101 || decl == OMP_CLAUSE_DECL (c) 9102 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF 9103 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0)) 9104 == ADDR_EXPR 9105 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0)) 9106 == POINTER_PLUS_EXPR 9107 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND 9108 (OMP_CLAUSE_DECL (c), 0), 0)) 9109 == ADDR_EXPR))))) 9110 && omp_check_private (ctx, decl, false)) 9111 { 9112 error ("%s variable %qE is private in outer context", 9113 check_non_private, DECL_NAME (decl)); 9114 remove = true; 9115 } 9116 break; 9117 9118 case OMP_CLAUSE_IF: 9119 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK 9120 && OMP_CLAUSE_IF_MODIFIER (c) != code) 9121 { 9122 const char *p[2]; 9123 for (int i = 0; i < 2; i++) 9124 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code) 9125 { 9126 case VOID_CST: p[i] = "cancel"; break; 9127 case OMP_PARALLEL: p[i] = "parallel"; break; 9128 case OMP_SIMD: p[i] = "simd"; break; 9129 case OMP_TASK: p[i] = "task"; break; 9130 case OMP_TASKLOOP: p[i] = "taskloop"; break; 9131 case OMP_TARGET_DATA: p[i] = "target data"; break; 9132 case OMP_TARGET: p[i] = "target"; break; 9133 case OMP_TARGET_UPDATE: p[i] = "target update"; break; 9134 case OMP_TARGET_ENTER_DATA: 9135 p[i] = "target enter data"; break; 9136 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break; 9137 default: gcc_unreachable (); 9138 } 9139 error_at (OMP_CLAUSE_LOCATION (c), 9140 "expected %qs %<if%> clause modifier rather than %qs", 9141 p[0], p[1]); 9142 remove = true; 9143 } 9144 /* Fall through. */ 9145 9146 case OMP_CLAUSE_FINAL: 9147 OMP_CLAUSE_OPERAND (c, 0) 9148 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0)); 9149 /* Fall through. 
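   The boolified condition is then gimplified by the operand handling
   shared with the clauses below.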
*/ 9150 9151 case OMP_CLAUSE_SCHEDULE: 9152 case OMP_CLAUSE_NUM_THREADS: 9153 case OMP_CLAUSE_NUM_TEAMS: 9154 case OMP_CLAUSE_THREAD_LIMIT: 9155 case OMP_CLAUSE_DIST_SCHEDULE: 9156 case OMP_CLAUSE_DEVICE: 9157 case OMP_CLAUSE_PRIORITY: 9158 case OMP_CLAUSE_GRAINSIZE: 9159 case OMP_CLAUSE_NUM_TASKS: 9160 case OMP_CLAUSE_HINT: 9161 case OMP_CLAUSE_ASYNC: 9162 case OMP_CLAUSE_WAIT: 9163 case OMP_CLAUSE_NUM_GANGS: 9164 case OMP_CLAUSE_NUM_WORKERS: 9165 case OMP_CLAUSE_VECTOR_LENGTH: 9166 case OMP_CLAUSE_WORKER: 9167 case OMP_CLAUSE_VECTOR: 9168 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL, 9169 is_gimple_val, fb_rvalue) == GS_ERROR) 9170 remove = true; 9171 break; 9172 9173 case OMP_CLAUSE_GANG: 9174 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL, 9175 is_gimple_val, fb_rvalue) == GS_ERROR) 9176 remove = true; 9177 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL, 9178 is_gimple_val, fb_rvalue) == GS_ERROR) 9179 remove = true; 9180 break; 9181 9182 case OMP_CLAUSE_NOWAIT: 9183 nowait = 1; 9184 break; 9185 9186 case OMP_CLAUSE_ORDERED: 9187 case OMP_CLAUSE_UNTIED: 9188 case OMP_CLAUSE_COLLAPSE: 9189 case OMP_CLAUSE_TILE: 9190 case OMP_CLAUSE_AUTO: 9191 case OMP_CLAUSE_SEQ: 9192 case OMP_CLAUSE_INDEPENDENT: 9193 case OMP_CLAUSE_MERGEABLE: 9194 case OMP_CLAUSE_PROC_BIND: 9195 case OMP_CLAUSE_SAFELEN: 9196 case OMP_CLAUSE_SIMDLEN: 9197 case OMP_CLAUSE_NOGROUP: 9198 case OMP_CLAUSE_THREADS: 9199 case OMP_CLAUSE_SIMD: 9200 case OMP_CLAUSE_IF_PRESENT: 9201 case OMP_CLAUSE_FINALIZE: 9202 break; 9203 9204 case OMP_CLAUSE_DEFAULTMAP: 9205 enum gimplify_defaultmap_kind gdmkmin, gdmkmax; 9206 switch (OMP_CLAUSE_DEFAULTMAP_CATEGORY (c)) 9207 { 9208 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED: 9209 gdmkmin = GDMK_SCALAR; 9210 gdmkmax = GDMK_POINTER; 9211 break; 9212 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR: 9213 gdmkmin = gdmkmax = GDMK_SCALAR; 9214 break; 9215 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE: 9216 gdmkmin = gdmkmax = GDMK_AGGREGATE; 9217 break; 9218 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE: 9219 gdmkmin = gdmkmax = GDMK_ALLOCATABLE; 9220 break; 9221 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER: 9222 gdmkmin = gdmkmax = GDMK_POINTER; 9223 break; 9224 default: 9225 gcc_unreachable (); 9226 } 9227 for (int gdmk = gdmkmin; gdmk <= gdmkmax; gdmk++) 9228 switch (OMP_CLAUSE_DEFAULTMAP_BEHAVIOR (c)) 9229 { 9230 case OMP_CLAUSE_DEFAULTMAP_ALLOC: 9231 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_ALLOC_ONLY; 9232 break; 9233 case OMP_CLAUSE_DEFAULTMAP_TO: 9234 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_TO_ONLY; 9235 break; 9236 case OMP_CLAUSE_DEFAULTMAP_FROM: 9237 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_FROM_ONLY; 9238 break; 9239 case OMP_CLAUSE_DEFAULTMAP_TOFROM: 9240 ctx->defaultmap[gdmk] = GOVD_MAP; 9241 break; 9242 case OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE: 9243 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE; 9244 break; 9245 case OMP_CLAUSE_DEFAULTMAP_NONE: 9246 ctx->defaultmap[gdmk] = 0; 9247 break; 9248 case OMP_CLAUSE_DEFAULTMAP_DEFAULT: 9249 switch (gdmk) 9250 { 9251 case GDMK_SCALAR: 9252 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE; 9253 break; 9254 case GDMK_AGGREGATE: 9255 case GDMK_ALLOCATABLE: 9256 ctx->defaultmap[gdmk] = GOVD_MAP; 9257 break; 9258 case GDMK_POINTER: 9259 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY; 9260 break; 9261 default: 9262 gcc_unreachable (); 9263 } 9264 break; 9265 default: 9266 gcc_unreachable (); 9267 } 9268 break; 9269 9270 case OMP_CLAUSE_ALIGNED: 9271 decl = OMP_CLAUSE_DECL (c); 9272 if (error_operand_p 
(decl)) 9273 { 9274 remove = true; 9275 break; 9276 } 9277 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL, 9278 is_gimple_val, fb_rvalue) == GS_ERROR) 9279 { 9280 remove = true; 9281 break; 9282 } 9283 if (!is_global_var (decl) 9284 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE) 9285 omp_add_variable (ctx, decl, GOVD_ALIGNED); 9286 break; 9287 9288 case OMP_CLAUSE_NONTEMPORAL: 9289 decl = OMP_CLAUSE_DECL (c); 9290 if (error_operand_p (decl)) 9291 { 9292 remove = true; 9293 break; 9294 } 9295 omp_add_variable (ctx, decl, GOVD_NONTEMPORAL); 9296 break; 9297 9298 case OMP_CLAUSE_DEFAULT: 9299 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c); 9300 break; 9301 9302 default: 9303 gcc_unreachable (); 9304 } 9305 9306 if (code == OACC_DATA 9307 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP 9308 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER 9309 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)) 9310 remove = true; 9311 if (remove) 9312 *list_p = OMP_CLAUSE_CHAIN (c); 9313 else 9314 list_p = &OMP_CLAUSE_CHAIN (c); 9315 } 9316 9317 gimplify_omp_ctxp = ctx; 9318 if (struct_map_to_clause) 9319 delete struct_map_to_clause; 9320 } 9321 9322 /* Return true if DECL is a candidate for shared to firstprivate 9323 optimization. We only consider non-addressable scalars, not 9324 too big, and not references. */ 9325 9326 static bool 9327 omp_shared_to_firstprivate_optimizable_decl_p (tree decl) 9328 { 9329 if (TREE_ADDRESSABLE (decl)) 9330 return false; 9331 tree type = TREE_TYPE (decl); 9332 if (!is_gimple_reg_type (type) 9333 || TREE_CODE (type) == REFERENCE_TYPE 9334 || TREE_ADDRESSABLE (type)) 9335 return false; 9336 /* Don't optimize too large decls, as each thread/task will have 9337 its own. */ 9338 HOST_WIDE_INT len = int_size_in_bytes (type); 9339 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT) 9340 return false; 9341 if (lang_hooks.decls.omp_privatize_by_reference (decl)) 9342 return false; 9343 return true; 9344 } 9345 9346 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*. 9347 For omp_shared_to_firstprivate_optimizable_decl_p decl mark it as 9348 GOVD_WRITTEN in outer contexts. */ 9349 9350 static void 9351 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl) 9352 { 9353 for (; ctx; ctx = ctx->outer_context) 9354 { 9355 splay_tree_node n = splay_tree_lookup (ctx->variables, 9356 (splay_tree_key) decl); 9357 if (n == NULL) 9358 continue; 9359 else if (n->value & GOVD_SHARED) 9360 { 9361 n->value |= GOVD_WRITTEN; 9362 return; 9363 } 9364 else if (n->value & GOVD_DATA_SHARE_CLASS) 9365 return; 9366 } 9367 } 9368 9369 /* Helper callback for walk_gimple_seq to discover possible stores 9370 to omp_shared_to_firstprivate_optimizable_decl_p decls and set 9371 GOVD_WRITTEN if they are GOVD_SHARED in some outer context 9372 for those. 
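This is what the shared-to-firstprivate optimization keys off: a shared scalar for which no store is discovered keeps GOVD_WRITTEN clear and can later be marked OMP_CLAUSE_SHARED_READONLY, i.e. treated like a read-only copy.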
*/
9373 
9374 static tree
9375 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
9376 {
9377   struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
9378 
9379   *walk_subtrees = 0;
9380   if (!wi->is_lhs)
9381     return NULL_TREE;
9382 
9383   tree op = *tp;
9384   do
9385     {
9386       if (handled_component_p (op))
9387         op = TREE_OPERAND (op, 0);
9388       else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
9389                && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
9390         op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
9391       else
9392         break;
9393     }
9394   while (1);
9395   if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
9396     return NULL_TREE;
9397 
9398   omp_mark_stores (gimplify_omp_ctxp, op);
9399   return NULL_TREE;
9400 }
9401 
9402 /* Helper callback for walk_gimple_seq to discover possible stores
9403    to omp_shared_to_firstprivate_optimizable_decl_p decls and set
9404    GOVD_WRITTEN for those that are GOVD_SHARED in some outer
9405    context.  */
9406 
9407 static tree
9408 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
9409                       bool *handled_ops_p,
9410                       struct walk_stmt_info *wi)
9411 {
9412   gimple *stmt = gsi_stmt (*gsi_p);
9413   switch (gimple_code (stmt))
9414     {
9415     /* Don't recurse on OpenMP constructs for which
9416        gimplify_adjust_omp_clauses already handled the bodies,
9417        except handle gimple_omp_for_pre_body.  */
9418     case GIMPLE_OMP_FOR:
9419       *handled_ops_p = true;
9420       if (gimple_omp_for_pre_body (stmt))
9421         walk_gimple_seq (gimple_omp_for_pre_body (stmt),
9422                          omp_find_stores_stmt, omp_find_stores_op, wi);
9423       break;
9424     case GIMPLE_OMP_PARALLEL:
9425     case GIMPLE_OMP_TASK:
9426     case GIMPLE_OMP_SECTIONS:
9427     case GIMPLE_OMP_SINGLE:
9428     case GIMPLE_OMP_TARGET:
9429     case GIMPLE_OMP_TEAMS:
9430     case GIMPLE_OMP_CRITICAL:
9431       *handled_ops_p = true;
9432       break;
9433     default:
9434       break;
9435     }
9436   return NULL_TREE;
9437 }
9438 
9439 struct gimplify_adjust_omp_clauses_data
9440 {
9441   tree *list_p;
9442   gimple_seq *pre_p;
9443 };
9444 
9445 /* Splay-tree callback: add an implicit data-sharing or map clause for
9446    the variable N describes, unless it is unused, explicit or local.
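   For example (illustrative), a scalar that is only read inside
   '#pragma omp parallel' is recorded as GOVD_SHARED | GOVD_SEEN with
   GOVD_WRITTEN clear, and so receives an implicit 'shared' clause
   with OMP_CLAUSE_SHARED_READONLY set.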
*/ 9447 9448 static int 9449 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data) 9450 { 9451 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p; 9452 gimple_seq *pre_p 9453 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p; 9454 tree decl = (tree) n->key; 9455 unsigned flags = n->value; 9456 enum omp_clause_code code; 9457 tree clause; 9458 bool private_debug; 9459 9460 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL)) 9461 return 0; 9462 if ((flags & GOVD_SEEN) == 0) 9463 return 0; 9464 if (flags & GOVD_DEBUG_PRIVATE) 9465 { 9466 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED); 9467 private_debug = true; 9468 } 9469 else if (flags & GOVD_MAP) 9470 private_debug = false; 9471 else 9472 private_debug 9473 = lang_hooks.decls.omp_private_debug_clause (decl, 9474 !!(flags & GOVD_SHARED)); 9475 if (private_debug) 9476 code = OMP_CLAUSE_PRIVATE; 9477 else if (flags & GOVD_MAP) 9478 { 9479 code = OMP_CLAUSE_MAP; 9480 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0 9481 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl)))) 9482 { 9483 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl); 9484 return 0; 9485 } 9486 } 9487 else if (flags & GOVD_SHARED) 9488 { 9489 if (is_global_var (decl)) 9490 { 9491 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context; 9492 while (ctx != NULL) 9493 { 9494 splay_tree_node on 9495 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl); 9496 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE 9497 | GOVD_PRIVATE | GOVD_REDUCTION 9498 | GOVD_LINEAR | GOVD_MAP)) != 0) 9499 break; 9500 ctx = ctx->outer_context; 9501 } 9502 if (ctx == NULL) 9503 return 0; 9504 } 9505 code = OMP_CLAUSE_SHARED; 9506 } 9507 else if (flags & GOVD_PRIVATE) 9508 code = OMP_CLAUSE_PRIVATE; 9509 else if (flags & GOVD_FIRSTPRIVATE) 9510 { 9511 code = OMP_CLAUSE_FIRSTPRIVATE; 9512 if ((gimplify_omp_ctxp->region_type & ORT_TARGET) 9513 && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0 9514 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl)))) 9515 { 9516 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on " 9517 "%<target%> construct", decl); 9518 return 0; 9519 } 9520 } 9521 else if (flags & GOVD_LASTPRIVATE) 9522 code = OMP_CLAUSE_LASTPRIVATE; 9523 else if (flags & (GOVD_ALIGNED | GOVD_NONTEMPORAL)) 9524 return 0; 9525 else 9526 gcc_unreachable (); 9527 9528 if (((flags & GOVD_LASTPRIVATE) 9529 || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN))) 9530 && omp_shared_to_firstprivate_optimizable_decl_p (decl)) 9531 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl); 9532 9533 tree chain = *list_p; 9534 clause = build_omp_clause (input_location, code); 9535 OMP_CLAUSE_DECL (clause) = decl; 9536 OMP_CLAUSE_CHAIN (clause) = chain; 9537 if (private_debug) 9538 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1; 9539 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF)) 9540 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1; 9541 else if (code == OMP_CLAUSE_SHARED 9542 && (flags & GOVD_WRITTEN) == 0 9543 && omp_shared_to_firstprivate_optimizable_decl_p (decl)) 9544 OMP_CLAUSE_SHARED_READONLY (clause) = 1; 9545 else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0) 9546 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1; 9547 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0) 9548 { 9549 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP); 9550 OMP_CLAUSE_DECL (nc) = decl; 9551 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE 9552 && TREE_CODE (TREE_TYPE 
(TREE_TYPE (decl))) == POINTER_TYPE) 9553 OMP_CLAUSE_DECL (clause) 9554 = build_simple_mem_ref_loc (input_location, decl); 9555 OMP_CLAUSE_DECL (clause) 9556 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause), 9557 build_int_cst (build_pointer_type (char_type_node), 0)); 9558 OMP_CLAUSE_SIZE (clause) = size_zero_node; 9559 OMP_CLAUSE_SIZE (nc) = size_zero_node; 9560 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC); 9561 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1; 9562 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER); 9563 OMP_CLAUSE_CHAIN (nc) = chain; 9564 OMP_CLAUSE_CHAIN (clause) = nc; 9565 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; 9566 gimplify_omp_ctxp = ctx->outer_context; 9567 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0), 9568 pre_p, NULL, is_gimple_val, fb_rvalue); 9569 gimplify_omp_ctxp = ctx; 9570 } 9571 else if (code == OMP_CLAUSE_MAP) 9572 { 9573 int kind; 9574 /* Not all combinations of these GOVD_MAP flags are actually valid. */ 9575 switch (flags & (GOVD_MAP_TO_ONLY 9576 | GOVD_MAP_FORCE 9577 | GOVD_MAP_FORCE_PRESENT 9578 | GOVD_MAP_ALLOC_ONLY 9579 | GOVD_MAP_FROM_ONLY)) 9580 { 9581 case 0: 9582 kind = GOMP_MAP_TOFROM; 9583 break; 9584 case GOVD_MAP_FORCE: 9585 kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE; 9586 break; 9587 case GOVD_MAP_TO_ONLY: 9588 kind = GOMP_MAP_TO; 9589 break; 9590 case GOVD_MAP_FROM_ONLY: 9591 kind = GOMP_MAP_FROM; 9592 break; 9593 case GOVD_MAP_ALLOC_ONLY: 9594 kind = GOMP_MAP_ALLOC; 9595 break; 9596 case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE: 9597 kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE; 9598 break; 9599 case GOVD_MAP_FORCE_PRESENT: 9600 kind = GOMP_MAP_FORCE_PRESENT; 9601 break; 9602 default: 9603 gcc_unreachable (); 9604 } 9605 OMP_CLAUSE_SET_MAP_KIND (clause, kind); 9606 if (DECL_SIZE (decl) 9607 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST) 9608 { 9609 tree decl2 = DECL_VALUE_EXPR (decl); 9610 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF); 9611 decl2 = TREE_OPERAND (decl2, 0); 9612 gcc_assert (DECL_P (decl2)); 9613 tree mem = build_simple_mem_ref (decl2); 9614 OMP_CLAUSE_DECL (clause) = mem; 9615 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl)); 9616 if (gimplify_omp_ctxp->outer_context) 9617 { 9618 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context; 9619 omp_notice_variable (ctx, decl2, true); 9620 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true); 9621 } 9622 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause), 9623 OMP_CLAUSE_MAP); 9624 OMP_CLAUSE_DECL (nc) = decl; 9625 OMP_CLAUSE_SIZE (nc) = size_zero_node; 9626 if (gimplify_omp_ctxp->target_firstprivatize_array_bases) 9627 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER); 9628 else 9629 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER); 9630 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause); 9631 OMP_CLAUSE_CHAIN (clause) = nc; 9632 } 9633 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases 9634 && lang_hooks.decls.omp_privatize_by_reference (decl)) 9635 { 9636 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl); 9637 OMP_CLAUSE_SIZE (clause) 9638 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))); 9639 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; 9640 gimplify_omp_ctxp = ctx->outer_context; 9641 gimplify_expr (&OMP_CLAUSE_SIZE (clause), 9642 pre_p, NULL, is_gimple_val, fb_rvalue); 9643 gimplify_omp_ctxp = ctx; 9644 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause), 9645 OMP_CLAUSE_MAP); 9646 OMP_CLAUSE_DECL (nc) = decl; 9647 OMP_CLAUSE_SIZE (nc) = 
size_zero_node; 9648 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE); 9649 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause); 9650 OMP_CLAUSE_CHAIN (clause) = nc; 9651 } 9652 else 9653 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl); 9654 } 9655 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0) 9656 { 9657 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE); 9658 OMP_CLAUSE_DECL (nc) = decl; 9659 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1; 9660 OMP_CLAUSE_CHAIN (nc) = chain; 9661 OMP_CLAUSE_CHAIN (clause) = nc; 9662 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; 9663 gimplify_omp_ctxp = ctx->outer_context; 9664 lang_hooks.decls.omp_finish_clause (nc, pre_p); 9665 gimplify_omp_ctxp = ctx; 9666 } 9667 *list_p = clause; 9668 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; 9669 gimplify_omp_ctxp = ctx->outer_context; 9670 lang_hooks.decls.omp_finish_clause (clause, pre_p); 9671 if (gimplify_omp_ctxp) 9672 for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause)) 9673 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP 9674 && DECL_P (OMP_CLAUSE_SIZE (clause))) 9675 omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause), 9676 true); 9677 gimplify_omp_ctxp = ctx; 9678 return 0; 9679 } 9680 9681 static void 9682 gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p, 9683 enum tree_code code) 9684 { 9685 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; 9686 tree c, decl; 9687 9688 if (body) 9689 { 9690 struct gimplify_omp_ctx *octx; 9691 for (octx = ctx; octx; octx = octx->outer_context) 9692 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0) 9693 break; 9694 if (octx) 9695 { 9696 struct walk_stmt_info wi; 9697 memset (&wi, 0, sizeof (wi)); 9698 walk_gimple_seq (body, omp_find_stores_stmt, 9699 omp_find_stores_op, &wi); 9700 } 9701 } 9702 9703 if (ctx->add_safelen1) 9704 { 9705 /* If there are VLAs in the body of simd loop, prevent 9706 vectorization. */ 9707 gcc_assert (ctx->region_type == ORT_SIMD); 9708 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN); 9709 OMP_CLAUSE_SAFELEN_EXPR (c) = integer_one_node; 9710 OMP_CLAUSE_CHAIN (c) = *list_p; 9711 *list_p = c; 9712 list_p = &OMP_CLAUSE_CHAIN (c); 9713 } 9714 9715 while ((c = *list_p) != NULL) 9716 { 9717 splay_tree_node n; 9718 bool remove = false; 9719 9720 switch (OMP_CLAUSE_CODE (c)) 9721 { 9722 case OMP_CLAUSE_FIRSTPRIVATE: 9723 if ((ctx->region_type & ORT_TARGET) 9724 && (ctx->region_type & ORT_ACC) == 0 9725 && TYPE_ATOMIC (strip_array_types 9726 (TREE_TYPE (OMP_CLAUSE_DECL (c))))) 9727 { 9728 error_at (OMP_CLAUSE_LOCATION (c), 9729 "%<_Atomic%> %qD in %<firstprivate%> clause on " 9730 "%<target%> construct", OMP_CLAUSE_DECL (c)); 9731 remove = true; 9732 break; 9733 } 9734 /* FALLTHRU */ 9735 case OMP_CLAUSE_PRIVATE: 9736 case OMP_CLAUSE_SHARED: 9737 case OMP_CLAUSE_LINEAR: 9738 decl = OMP_CLAUSE_DECL (c); 9739 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl); 9740 remove = !(n->value & GOVD_SEEN); 9741 if (! 
remove) 9742 { 9743 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED; 9744 if ((n->value & GOVD_DEBUG_PRIVATE) 9745 || lang_hooks.decls.omp_private_debug_clause (decl, shared)) 9746 { 9747 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0 9748 || ((n->value & GOVD_DATA_SHARE_CLASS) 9749 == GOVD_SHARED)); 9750 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE); 9751 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1; 9752 } 9753 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED 9754 && (n->value & GOVD_WRITTEN) == 0 9755 && DECL_P (decl) 9756 && omp_shared_to_firstprivate_optimizable_decl_p (decl)) 9757 OMP_CLAUSE_SHARED_READONLY (c) = 1; 9758 else if (DECL_P (decl) 9759 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED 9760 && (n->value & GOVD_WRITTEN) != 0) 9761 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR 9762 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))) 9763 && omp_shared_to_firstprivate_optimizable_decl_p (decl)) 9764 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl); 9765 } 9766 break; 9767 9768 case OMP_CLAUSE_LASTPRIVATE: 9769 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to 9770 accurately reflect the presence of a FIRSTPRIVATE clause. */ 9771 decl = OMP_CLAUSE_DECL (c); 9772 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl); 9773 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) 9774 = (n->value & GOVD_FIRSTPRIVATE) != 0; 9775 if (code == OMP_DISTRIBUTE 9776 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)) 9777 { 9778 remove = true; 9779 error_at (OMP_CLAUSE_LOCATION (c), 9780 "same variable used in %<firstprivate%> and " 9781 "%<lastprivate%> clauses on %<distribute%> " 9782 "construct"); 9783 } 9784 if (!remove 9785 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE 9786 && DECL_P (decl) 9787 && omp_shared_to_firstprivate_optimizable_decl_p (decl)) 9788 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl); 9789 break; 9790 9791 case OMP_CLAUSE_ALIGNED: 9792 decl = OMP_CLAUSE_DECL (c); 9793 if (!is_global_var (decl)) 9794 { 9795 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl); 9796 remove = n == NULL || !(n->value & GOVD_SEEN); 9797 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE) 9798 { 9799 struct gimplify_omp_ctx *octx; 9800 if (n != NULL 9801 && (n->value & (GOVD_DATA_SHARE_CLASS 9802 & ~GOVD_FIRSTPRIVATE))) 9803 remove = true; 9804 else 9805 for (octx = ctx->outer_context; octx; 9806 octx = octx->outer_context) 9807 { 9808 n = splay_tree_lookup (octx->variables, 9809 (splay_tree_key) decl); 9810 if (n == NULL) 9811 continue; 9812 if (n->value & GOVD_LOCAL) 9813 break; 9814 /* We have to avoid assigning a shared variable 9815 to itself when trying to add 9816 __builtin_assume_aligned. */ 9817 if (n->value & GOVD_SHARED) 9818 { 9819 remove = true; 9820 break; 9821 } 9822 } 9823 } 9824 } 9825 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE) 9826 { 9827 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl); 9828 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0) 9829 remove = true; 9830 } 9831 break; 9832 9833 case OMP_CLAUSE_NONTEMPORAL: 9834 decl = OMP_CLAUSE_DECL (c); 9835 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl); 9836 remove = n == NULL || !(n->value & GOVD_SEEN); 9837 break; 9838 9839 case OMP_CLAUSE_MAP: 9840 if (code == OMP_TARGET_EXIT_DATA 9841 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER) 9842 { 9843 remove = true; 9844 break; 9845 } 9846 decl = OMP_CLAUSE_DECL (c); 9847 /* Data clauses associated with acc parallel reductions must be 9848 compatible with present_or_copy. 
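In map-kind terms, present_or_copy corresponds to GOMP_MAP_TOFROM without the force flag.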
Warn and adjust the clause 9849 if that is not the case. */ 9850 if (ctx->region_type == ORT_ACC_PARALLEL) 9851 { 9852 tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0); 9853 n = NULL; 9854 9855 if (DECL_P (t)) 9856 n = splay_tree_lookup (ctx->variables, (splay_tree_key) t); 9857 9858 if (n && (n->value & GOVD_REDUCTION)) 9859 { 9860 enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c); 9861 9862 OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1; 9863 if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM 9864 && kind != GOMP_MAP_FORCE_PRESENT 9865 && kind != GOMP_MAP_POINTER) 9866 { 9867 warning_at (OMP_CLAUSE_LOCATION (c), 0, 9868 "incompatible data clause with reduction " 9869 "on %qE; promoting to present_or_copy", 9870 DECL_NAME (t)); 9871 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM); 9872 } 9873 } 9874 } 9875 if (!DECL_P (decl)) 9876 { 9877 if ((ctx->region_type & ORT_TARGET) != 0 9878 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER) 9879 { 9880 if (TREE_CODE (decl) == INDIRECT_REF 9881 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF 9882 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0))) 9883 == REFERENCE_TYPE)) 9884 decl = TREE_OPERAND (decl, 0); 9885 if (TREE_CODE (decl) == COMPONENT_REF) 9886 { 9887 while (TREE_CODE (decl) == COMPONENT_REF) 9888 decl = TREE_OPERAND (decl, 0); 9889 if (DECL_P (decl)) 9890 { 9891 n = splay_tree_lookup (ctx->variables, 9892 (splay_tree_key) decl); 9893 if (!(n->value & GOVD_SEEN)) 9894 remove = true; 9895 } 9896 } 9897 } 9898 break; 9899 } 9900 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl); 9901 if ((ctx->region_type & ORT_TARGET) != 0 9902 && !(n->value & GOVD_SEEN) 9903 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0 9904 && (!is_global_var (decl) 9905 || !lookup_attribute ("omp declare target link", 9906 DECL_ATTRIBUTES (decl)))) 9907 { 9908 remove = true; 9909 /* For struct element mapping, if struct is never referenced 9910 in target block and none of the mapping has always modifier, 9911 remove all the struct element mappings, which immediately 9912 follow the GOMP_MAP_STRUCT map clause. */ 9913 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT) 9914 { 9915 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c)); 9916 while (cnt--) 9917 OMP_CLAUSE_CHAIN (c) 9918 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c)); 9919 } 9920 } 9921 else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT 9922 && code == OMP_TARGET_EXIT_DATA) 9923 remove = true; 9924 else if (DECL_SIZE (decl) 9925 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST 9926 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER 9927 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER 9928 && (OMP_CLAUSE_MAP_KIND (c) 9929 != GOMP_MAP_FIRSTPRIVATE_REFERENCE)) 9930 { 9931 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because 9932 for these, TREE_CODE (DECL_SIZE (decl)) will always be 9933 INTEGER_CST. 
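What does reach this branch is a variable-sized decl whose DECL_VALUE_EXPR is an INDIRECT_REF of a pointer: the clause is rewritten to map the dereferenced pointer with the type's size, and, where needed, a GOMP_MAP_FIRSTPRIVATE_POINTER or GOMP_MAP_POINTER clause for the base pointer is chained after it.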
*/ 9934 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR); 9935 9936 tree decl2 = DECL_VALUE_EXPR (decl); 9937 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF); 9938 decl2 = TREE_OPERAND (decl2, 0); 9939 gcc_assert (DECL_P (decl2)); 9940 tree mem = build_simple_mem_ref (decl2); 9941 OMP_CLAUSE_DECL (c) = mem; 9942 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl)); 9943 if (ctx->outer_context) 9944 { 9945 omp_notice_variable (ctx->outer_context, decl2, true); 9946 omp_notice_variable (ctx->outer_context, 9947 OMP_CLAUSE_SIZE (c), true); 9948 } 9949 if (((ctx->region_type & ORT_TARGET) != 0 9950 || !ctx->target_firstprivatize_array_bases) 9951 && ((n->value & GOVD_SEEN) == 0 9952 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0)) 9953 { 9954 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c), 9955 OMP_CLAUSE_MAP); 9956 OMP_CLAUSE_DECL (nc) = decl; 9957 OMP_CLAUSE_SIZE (nc) = size_zero_node; 9958 if (ctx->target_firstprivatize_array_bases) 9959 OMP_CLAUSE_SET_MAP_KIND (nc, 9960 GOMP_MAP_FIRSTPRIVATE_POINTER); 9961 else 9962 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER); 9963 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c); 9964 OMP_CLAUSE_CHAIN (c) = nc; 9965 c = nc; 9966 } 9967 } 9968 else 9969 { 9970 if (OMP_CLAUSE_SIZE (c) == NULL_TREE) 9971 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl); 9972 gcc_assert ((n->value & GOVD_SEEN) == 0 9973 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) 9974 == 0)); 9975 } 9976 break; 9977 9978 case OMP_CLAUSE_TO: 9979 case OMP_CLAUSE_FROM: 9980 case OMP_CLAUSE__CACHE_: 9981 decl = OMP_CLAUSE_DECL (c); 9982 if (!DECL_P (decl)) 9983 break; 9984 if (DECL_SIZE (decl) 9985 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST) 9986 { 9987 tree decl2 = DECL_VALUE_EXPR (decl); 9988 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF); 9989 decl2 = TREE_OPERAND (decl2, 0); 9990 gcc_assert (DECL_P (decl2)); 9991 tree mem = build_simple_mem_ref (decl2); 9992 OMP_CLAUSE_DECL (c) = mem; 9993 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl)); 9994 if (ctx->outer_context) 9995 { 9996 omp_notice_variable (ctx->outer_context, decl2, true); 9997 omp_notice_variable (ctx->outer_context, 9998 OMP_CLAUSE_SIZE (c), true); 9999 } 10000 } 10001 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE) 10002 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl); 10003 break; 10004 10005 case OMP_CLAUSE_REDUCTION: 10006 case OMP_CLAUSE_IN_REDUCTION: 10007 case OMP_CLAUSE_TASK_REDUCTION: 10008 decl = OMP_CLAUSE_DECL (c); 10009 /* OpenACC reductions need a present_or_copy data clause. 10010 Add one if necessary. Emit error when the reduction is private. 
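E.g. (illustrative) '#pragma acc parallel reduction (+:sum)' with no data clause for sum gets a GOMP_MAP_TOFROM map clause for sum built, finished via the omp_finish_clause langhook, and spliced into the clause chain.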
*/ 10011 if (ctx->region_type == ORT_ACC_PARALLEL) 10012 { 10013 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl); 10014 if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) 10015 { 10016 remove = true; 10017 error_at (OMP_CLAUSE_LOCATION (c), "invalid private " 10018 "reduction on %qE", DECL_NAME (decl)); 10019 } 10020 else if ((n->value & GOVD_MAP) == 0) 10021 { 10022 tree next = OMP_CLAUSE_CHAIN (c); 10023 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP); 10024 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM); 10025 OMP_CLAUSE_DECL (nc) = decl; 10026 OMP_CLAUSE_CHAIN (c) = nc; 10027 lang_hooks.decls.omp_finish_clause (nc, pre_p); 10028 while (1) 10029 { 10030 OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1; 10031 if (OMP_CLAUSE_CHAIN (nc) == NULL) 10032 break; 10033 nc = OMP_CLAUSE_CHAIN (nc); 10034 } 10035 OMP_CLAUSE_CHAIN (nc) = next; 10036 n->value |= GOVD_MAP; 10037 } 10038 } 10039 if (DECL_P (decl) 10040 && omp_shared_to_firstprivate_optimizable_decl_p (decl)) 10041 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl); 10042 break; 10043 case OMP_CLAUSE_COPYIN: 10044 case OMP_CLAUSE_COPYPRIVATE: 10045 case OMP_CLAUSE_IF: 10046 case OMP_CLAUSE_NUM_THREADS: 10047 case OMP_CLAUSE_NUM_TEAMS: 10048 case OMP_CLAUSE_THREAD_LIMIT: 10049 case OMP_CLAUSE_DIST_SCHEDULE: 10050 case OMP_CLAUSE_DEVICE: 10051 case OMP_CLAUSE_SCHEDULE: 10052 case OMP_CLAUSE_NOWAIT: 10053 case OMP_CLAUSE_ORDERED: 10054 case OMP_CLAUSE_DEFAULT: 10055 case OMP_CLAUSE_UNTIED: 10056 case OMP_CLAUSE_COLLAPSE: 10057 case OMP_CLAUSE_FINAL: 10058 case OMP_CLAUSE_MERGEABLE: 10059 case OMP_CLAUSE_PROC_BIND: 10060 case OMP_CLAUSE_SAFELEN: 10061 case OMP_CLAUSE_SIMDLEN: 10062 case OMP_CLAUSE_DEPEND: 10063 case OMP_CLAUSE_PRIORITY: 10064 case OMP_CLAUSE_GRAINSIZE: 10065 case OMP_CLAUSE_NUM_TASKS: 10066 case OMP_CLAUSE_NOGROUP: 10067 case OMP_CLAUSE_THREADS: 10068 case OMP_CLAUSE_SIMD: 10069 case OMP_CLAUSE_HINT: 10070 case OMP_CLAUSE_DEFAULTMAP: 10071 case OMP_CLAUSE_USE_DEVICE_PTR: 10072 case OMP_CLAUSE_IS_DEVICE_PTR: 10073 case OMP_CLAUSE_ASYNC: 10074 case OMP_CLAUSE_WAIT: 10075 case OMP_CLAUSE_INDEPENDENT: 10076 case OMP_CLAUSE_NUM_GANGS: 10077 case OMP_CLAUSE_NUM_WORKERS: 10078 case OMP_CLAUSE_VECTOR_LENGTH: 10079 case OMP_CLAUSE_GANG: 10080 case OMP_CLAUSE_WORKER: 10081 case OMP_CLAUSE_VECTOR: 10082 case OMP_CLAUSE_AUTO: 10083 case OMP_CLAUSE_SEQ: 10084 case OMP_CLAUSE_TILE: 10085 case OMP_CLAUSE_IF_PRESENT: 10086 case OMP_CLAUSE_FINALIZE: 10087 break; 10088 10089 default: 10090 gcc_unreachable (); 10091 } 10092 10093 if (remove) 10094 *list_p = OMP_CLAUSE_CHAIN (c); 10095 else 10096 list_p = &OMP_CLAUSE_CHAIN (c); 10097 } 10098 10099 /* Add in any implicit data sharing. */ 10100 struct gimplify_adjust_omp_clauses_data data; 10101 data.list_p = list_p; 10102 data.pre_p = pre_p; 10103 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data); 10104 10105 gimplify_omp_ctxp = ctx->outer_context; 10106 delete_omp_context (ctx); 10107 } 10108 10109 /* Gimplify OACC_CACHE. */ 10110 10111 static void 10112 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p) 10113 { 10114 tree expr = *expr_p; 10115 10116 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC, 10117 OACC_CACHE); 10118 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr), 10119 OACC_CACHE); 10120 10121 /* TODO: Do something sensible with this information. */ 10122 10123 *expr_p = NULL_TREE; 10124 } 10125 10126 /* Helper function of gimplify_oacc_declare. 
The helper's purpose is to, 10127 if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit' 10128 kind. The entry kind will replace the one in CLAUSE, while the exit 10129 kind will be used in a new omp_clause and returned to the caller. */ 10130 10131 static tree 10132 gimplify_oacc_declare_1 (tree clause) 10133 { 10134 HOST_WIDE_INT kind, new_op; 10135 bool ret = false; 10136 tree c = NULL; 10137 10138 kind = OMP_CLAUSE_MAP_KIND (clause); 10139 10140 switch (kind) 10141 { 10142 case GOMP_MAP_ALLOC: 10143 new_op = GOMP_MAP_RELEASE; 10144 ret = true; 10145 break; 10146 10147 case GOMP_MAP_FROM: 10148 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC); 10149 new_op = GOMP_MAP_FROM; 10150 ret = true; 10151 break; 10152 10153 case GOMP_MAP_TOFROM: 10154 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO); 10155 new_op = GOMP_MAP_FROM; 10156 ret = true; 10157 break; 10158 10159 case GOMP_MAP_DEVICE_RESIDENT: 10160 case GOMP_MAP_FORCE_DEVICEPTR: 10161 case GOMP_MAP_FORCE_PRESENT: 10162 case GOMP_MAP_LINK: 10163 case GOMP_MAP_POINTER: 10164 case GOMP_MAP_TO: 10165 break; 10166 10167 default: 10168 gcc_unreachable (); 10169 break; 10170 } 10171 10172 if (ret) 10173 { 10174 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP); 10175 OMP_CLAUSE_SET_MAP_KIND (c, new_op); 10176 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause); 10177 } 10178 10179 return c; 10180 } 10181 10182 /* Gimplify OACC_DECLARE. */ 10183 10184 static void 10185 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p) 10186 { 10187 tree expr = *expr_p; 10188 gomp_target *stmt; 10189 tree clauses, t, decl; 10190 10191 clauses = OACC_DECLARE_CLAUSES (expr); 10192 10193 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE); 10194 gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE); 10195 10196 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t)) 10197 { 10198 decl = OMP_CLAUSE_DECL (t); 10199 10200 if (TREE_CODE (decl) == MEM_REF) 10201 decl = TREE_OPERAND (decl, 0); 10202 10203 if (VAR_P (decl) && !is_oacc_declared (decl)) 10204 { 10205 tree attr = get_identifier ("oacc declare target"); 10206 DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE, 10207 DECL_ATTRIBUTES (decl)); 10208 } 10209 10210 if (VAR_P (decl) 10211 && !is_global_var (decl) 10212 && DECL_CONTEXT (decl) == current_function_decl) 10213 { 10214 tree c = gimplify_oacc_declare_1 (t); 10215 if (c) 10216 { 10217 if (oacc_declare_returns == NULL) 10218 oacc_declare_returns = new hash_map<tree, tree>; 10219 10220 oacc_declare_returns->put (decl, c); 10221 } 10222 } 10223 10224 if (gimplify_omp_ctxp) 10225 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN); 10226 } 10227 10228 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE, 10229 clauses); 10230 10231 gimplify_seq_add_stmt (pre_p, stmt); 10232 10233 *expr_p = NULL_TREE; 10234 } 10235 10236 /* Gimplify the contents of an OMP_PARALLEL statement. This involves 10237 gimplification of the body, as well as scanning the body for used 10238 variables. We need to do this scan now, because variable-sized 10239 decls will be decomposed during gimplification. */ 10240 10241 static void 10242 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p) 10243 { 10244 tree expr = *expr_p; 10245 gimple *g; 10246 gimple_seq body = NULL; 10247 10248 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p, 10249 OMP_PARALLEL_COMBINED (expr) 10250 ? 
ORT_COMBINED_PARALLEL 10251 : ORT_PARALLEL, OMP_PARALLEL); 10252 10253 push_gimplify_context (); 10254 10255 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body); 10256 if (gimple_code (g) == GIMPLE_BIND) 10257 pop_gimplify_context (g); 10258 else 10259 pop_gimplify_context (NULL); 10260 10261 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr), 10262 OMP_PARALLEL); 10263 10264 g = gimple_build_omp_parallel (body, 10265 OMP_PARALLEL_CLAUSES (expr), 10266 NULL_TREE, NULL_TREE); 10267 if (OMP_PARALLEL_COMBINED (expr)) 10268 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED); 10269 gimplify_seq_add_stmt (pre_p, g); 10270 *expr_p = NULL_TREE; 10271 } 10272 10273 /* Gimplify the contents of an OMP_TASK statement. This involves 10274 gimplification of the body, as well as scanning the body for used 10275 variables. We need to do this scan now, because variable-sized 10276 decls will be decomposed during gimplification. */ 10277 10278 static void 10279 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p) 10280 { 10281 tree expr = *expr_p; 10282 gimple *g; 10283 gimple_seq body = NULL; 10284 10285 if (OMP_TASK_BODY (expr) == NULL_TREE) 10286 for (tree c = OMP_TASK_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c)) 10287 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND 10288 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_MUTEXINOUTSET) 10289 { 10290 error_at (OMP_CLAUSE_LOCATION (c), 10291 "%<mutexinoutset%> kind in %<depend%> clause on a " 10292 "%<taskwait%> construct"); 10293 break; 10294 } 10295 10296 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p, 10297 omp_find_clause (OMP_TASK_CLAUSES (expr), 10298 OMP_CLAUSE_UNTIED) 10299 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK); 10300 10301 if (OMP_TASK_BODY (expr)) 10302 { 10303 push_gimplify_context (); 10304 10305 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body); 10306 if (gimple_code (g) == GIMPLE_BIND) 10307 pop_gimplify_context (g); 10308 else 10309 pop_gimplify_context (NULL); 10310 } 10311 10312 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr), 10313 OMP_TASK); 10314 10315 g = gimple_build_omp_task (body, 10316 OMP_TASK_CLAUSES (expr), 10317 NULL_TREE, NULL_TREE, 10318 NULL_TREE, NULL_TREE, NULL_TREE); 10319 if (OMP_TASK_BODY (expr) == NULL_TREE) 10320 gimple_omp_task_set_taskwait_p (g, true); 10321 gimplify_seq_add_stmt (pre_p, g); 10322 *expr_p = NULL_TREE; 10323 } 10324 10325 /* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD 10326 with non-NULL OMP_FOR_INIT. Also, fill in pdata array, 10327 pdata[0] non-NULL if there is anything non-trivial in between, pdata[1] 10328 is address of OMP_PARALLEL in between if any, pdata[2] is address of 10329 OMP_FOR in between if any and pdata[3] is address of the inner 10330 OMP_FOR/OMP_SIMD. 
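E.g. (illustrative) for '#pragma omp distribute parallel for simd' the walk over the OMP_DISTRIBUTE body records the OMP_PARALLEL in pdata[1] and the OMP_FOR (whose OMP_FOR_INIT is NULL) in pdata[2], and stops at the innermost OMP_SIMD, whose address ends up in pdata[3].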
*/ 10331 10332 static tree 10333 find_combined_omp_for (tree *tp, int *walk_subtrees, void *data) 10334 { 10335 tree **pdata = (tree **) data; 10336 *walk_subtrees = 0; 10337 switch (TREE_CODE (*tp)) 10338 { 10339 case OMP_FOR: 10340 if (OMP_FOR_INIT (*tp) != NULL_TREE) 10341 { 10342 pdata[3] = tp; 10343 return *tp; 10344 } 10345 pdata[2] = tp; 10346 *walk_subtrees = 1; 10347 break; 10348 case OMP_SIMD: 10349 if (OMP_FOR_INIT (*tp) != NULL_TREE) 10350 { 10351 pdata[3] = tp; 10352 return *tp; 10353 } 10354 break; 10355 case BIND_EXPR: 10356 if (BIND_EXPR_VARS (*tp) 10357 || (BIND_EXPR_BLOCK (*tp) 10358 && BLOCK_VARS (BIND_EXPR_BLOCK (*tp)))) 10359 pdata[0] = tp; 10360 *walk_subtrees = 1; 10361 break; 10362 case STATEMENT_LIST: 10363 if (!tsi_one_before_end_p (tsi_start (*tp))) 10364 pdata[0] = tp; 10365 *walk_subtrees = 1; 10366 break; 10367 case TRY_FINALLY_EXPR: 10368 pdata[0] = tp; 10369 *walk_subtrees = 1; 10370 break; 10371 case OMP_PARALLEL: 10372 pdata[1] = tp; 10373 *walk_subtrees = 1; 10374 break; 10375 default: 10376 break; 10377 } 10378 return NULL_TREE; 10379 } 10380 10381 /* Gimplify the gross structure of an OMP_FOR statement. */ 10382 10383 static enum gimplify_status 10384 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p) 10385 { 10386 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t; 10387 enum gimplify_status ret = GS_ALL_DONE; 10388 enum gimplify_status tret; 10389 gomp_for *gfor; 10390 gimple_seq for_body, for_pre_body; 10391 int i; 10392 bitmap has_decl_expr = NULL; 10393 enum omp_region_type ort = ORT_WORKSHARE; 10394 10395 orig_for_stmt = for_stmt = *expr_p; 10396 10397 if (OMP_FOR_INIT (for_stmt) == NULL_TREE) 10398 { 10399 tree *data[4] = { NULL, NULL, NULL, NULL }; 10400 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP); 10401 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt), 10402 find_combined_omp_for, data, NULL); 10403 if (inner_for_stmt == NULL_TREE) 10404 { 10405 gcc_assert (seen_error ()); 10406 *expr_p = NULL_TREE; 10407 return GS_ERROR; 10408 } 10409 if (data[2] && OMP_FOR_PRE_BODY (*data[2])) 10410 { 10411 append_to_statement_list_force (OMP_FOR_PRE_BODY (*data[2]), 10412 &OMP_FOR_PRE_BODY (for_stmt)); 10413 OMP_FOR_PRE_BODY (*data[2]) = NULL_TREE; 10414 } 10415 if (OMP_FOR_PRE_BODY (inner_for_stmt)) 10416 { 10417 append_to_statement_list_force (OMP_FOR_PRE_BODY (inner_for_stmt), 10418 &OMP_FOR_PRE_BODY (for_stmt)); 10419 OMP_FOR_PRE_BODY (inner_for_stmt) = NULL_TREE; 10420 } 10421 10422 if (data[0]) 10423 { 10424 /* We have some statements or variable declarations in between 10425 the composite construct directives. Move them around the 10426 inner_for_stmt. 
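Concretely, each intervening construct's body (with those statements) is hoisted out, and the construct itself is reinserted at the innermost point with a fresh BIND_EXPR body containing just inner_for_stmt; returning GS_OK then regimplifies the rewritten tree.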
*/ 10427 data[0] = expr_p; 10428 for (i = 0; i < 3; i++) 10429 if (data[i]) 10430 { 10431 tree t = *data[i]; 10432 if (i < 2 && data[i + 1] == &OMP_BODY (t)) 10433 data[i + 1] = data[i]; 10434 *data[i] = OMP_BODY (t); 10435 tree body = build3 (BIND_EXPR, void_type_node, NULL_TREE, 10436 NULL_TREE, make_node (BLOCK)); 10437 OMP_BODY (t) = body; 10438 append_to_statement_list_force (inner_for_stmt, 10439 &BIND_EXPR_BODY (body)); 10440 *data[3] = t; 10441 data[3] = tsi_stmt_ptr (tsi_start (BIND_EXPR_BODY (body))); 10442 gcc_assert (*data[3] == inner_for_stmt); 10443 } 10444 return GS_OK; 10445 } 10446 10447 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++) 10448 if (OMP_FOR_ORIG_DECLS (inner_for_stmt) 10449 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), 10450 i)) == TREE_LIST 10451 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), 10452 i))) 10453 { 10454 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i); 10455 /* Class iterators aren't allowed on OMP_SIMD, so the only 10456 case we need to solve is distribute parallel for. */ 10457 gcc_assert (TREE_CODE (inner_for_stmt) == OMP_FOR 10458 && TREE_CODE (for_stmt) == OMP_DISTRIBUTE 10459 && data[1]); 10460 tree orig_decl = TREE_PURPOSE (orig); 10461 tree last = TREE_VALUE (orig); 10462 tree *pc; 10463 for (pc = &OMP_FOR_CLAUSES (inner_for_stmt); 10464 *pc; pc = &OMP_CLAUSE_CHAIN (*pc)) 10465 if ((OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE 10466 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LASTPRIVATE) 10467 && OMP_CLAUSE_DECL (*pc) == orig_decl) 10468 break; 10469 if (*pc == NULL_TREE) 10470 { 10471 tree *spc; 10472 for (spc = &OMP_PARALLEL_CLAUSES (*data[1]); 10473 *spc; spc = &OMP_CLAUSE_CHAIN (*spc)) 10474 if (OMP_CLAUSE_CODE (*spc) == OMP_CLAUSE_PRIVATE 10475 && OMP_CLAUSE_DECL (*spc) == orig_decl) 10476 break; 10477 if (*spc) 10478 { 10479 tree c = *spc; 10480 *spc = OMP_CLAUSE_CHAIN (c); 10481 OMP_CLAUSE_CHAIN (c) = NULL_TREE; 10482 *pc = c; 10483 } 10484 } 10485 if (*pc == NULL_TREE) 10486 ; 10487 else if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE) 10488 { 10489 /* private clause will appear only on inner_for_stmt. 10490 Change it into firstprivate, and add private clause 10491 on for_stmt. */ 10492 tree c = copy_node (*pc); 10493 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt); 10494 OMP_FOR_CLAUSES (for_stmt) = c; 10495 OMP_CLAUSE_CODE (*pc) = OMP_CLAUSE_FIRSTPRIVATE; 10496 lang_hooks.decls.omp_finish_clause (*pc, pre_p); 10497 } 10498 else 10499 { 10500 /* lastprivate clause will appear on both inner_for_stmt 10501 and for_stmt. Add firstprivate clause to 10502 inner_for_stmt. */ 10503 tree c = build_omp_clause (OMP_CLAUSE_LOCATION (*pc), 10504 OMP_CLAUSE_FIRSTPRIVATE); 10505 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (*pc); 10506 OMP_CLAUSE_CHAIN (c) = *pc; 10507 *pc = c; 10508 lang_hooks.decls.omp_finish_clause (*pc, pre_p); 10509 } 10510 tree c = build_omp_clause (UNKNOWN_LOCATION, 10511 OMP_CLAUSE_FIRSTPRIVATE); 10512 OMP_CLAUSE_DECL (c) = last; 10513 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]); 10514 OMP_PARALLEL_CLAUSES (*data[1]) = c; 10515 c = build_omp_clause (UNKNOWN_LOCATION, 10516 *pc ? OMP_CLAUSE_SHARED 10517 : OMP_CLAUSE_FIRSTPRIVATE); 10518 OMP_CLAUSE_DECL (c) = orig_decl; 10519 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]); 10520 OMP_PARALLEL_CLAUSES (*data[1]) = c; 10521 } 10522 /* Similarly, take care of C++ range for temporaries, those should 10523 be firstprivate on OMP_PARALLEL if any. 
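In OMP_FOR_ORIG_DECLS the TREE_CHAIN of such an element is a TREE_VEC: slot 1 holds the artificial __for_end decl and slot 0, if non-NULL, the __for_range decl; both get firstprivate clauses below.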
*/ 10524 if (data[1]) 10525 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++) 10526 if (OMP_FOR_ORIG_DECLS (inner_for_stmt) 10527 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), 10528 i)) == TREE_LIST 10529 && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), 10530 i))) 10531 { 10532 tree orig 10533 = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i); 10534 tree v = TREE_CHAIN (orig); 10535 tree c = build_omp_clause (UNKNOWN_LOCATION, 10536 OMP_CLAUSE_FIRSTPRIVATE); 10537 /* First add firstprivate clause for the __for_end artificial 10538 decl. */ 10539 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 1); 10540 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c))) 10541 == REFERENCE_TYPE) 10542 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1; 10543 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]); 10544 OMP_PARALLEL_CLAUSES (*data[1]) = c; 10545 if (TREE_VEC_ELT (v, 0)) 10546 { 10547 /* And now the same for __for_range artificial decl if it 10548 exists. */ 10549 c = build_omp_clause (UNKNOWN_LOCATION, 10550 OMP_CLAUSE_FIRSTPRIVATE); 10551 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 0); 10552 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c))) 10553 == REFERENCE_TYPE) 10554 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1; 10555 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]); 10556 OMP_PARALLEL_CLAUSES (*data[1]) = c; 10557 } 10558 } 10559 } 10560 10561 switch (TREE_CODE (for_stmt)) 10562 { 10563 case OMP_FOR: 10564 case OMP_DISTRIBUTE: 10565 break; 10566 case OACC_LOOP: 10567 ort = ORT_ACC; 10568 break; 10569 case OMP_TASKLOOP: 10570 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED)) 10571 ort = ORT_UNTIED_TASKLOOP; 10572 else 10573 ort = ORT_TASKLOOP; 10574 break; 10575 case OMP_SIMD: 10576 ort = ORT_SIMD; 10577 break; 10578 default: 10579 gcc_unreachable (); 10580 } 10581 10582 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear 10583 clause for the IV. */ 10584 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1) 10585 { 10586 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0); 10587 gcc_assert (TREE_CODE (t) == MODIFY_EXPR); 10588 decl = TREE_OPERAND (t, 0); 10589 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c)) 10590 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR 10591 && OMP_CLAUSE_DECL (c) == decl) 10592 { 10593 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1; 10594 break; 10595 } 10596 } 10597 10598 if (TREE_CODE (for_stmt) != OMP_TASKLOOP) 10599 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort, 10600 TREE_CODE (for_stmt)); 10601 10602 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE) 10603 gimplify_omp_ctxp->distribute = true; 10604 10605 /* Handle OMP_FOR_INIT. 
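For simd, or a combined construct with an inner simd, first record in HAS_DECL_EXPR the DECL_UIDs of variables declared in the pre-body, so we know below which iteration variables were declared in the loop header itself.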
*/ 10606 for_pre_body = NULL; 10607 if ((ort == ORT_SIMD 10608 || (inner_for_stmt && TREE_CODE (inner_for_stmt) == OMP_SIMD)) 10609 && OMP_FOR_PRE_BODY (for_stmt)) 10610 { 10611 has_decl_expr = BITMAP_ALLOC (NULL); 10612 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR 10613 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt))) 10614 == VAR_DECL) 10615 { 10616 t = OMP_FOR_PRE_BODY (for_stmt); 10617 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t))); 10618 } 10619 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST) 10620 { 10621 tree_stmt_iterator si; 10622 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si); 10623 tsi_next (&si)) 10624 { 10625 t = tsi_stmt (si); 10626 if (TREE_CODE (t) == DECL_EXPR 10627 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL) 10628 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t))); 10629 } 10630 } 10631 } 10632 if (OMP_FOR_PRE_BODY (for_stmt)) 10633 { 10634 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp) 10635 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body); 10636 else 10637 { 10638 struct gimplify_omp_ctx ctx; 10639 memset (&ctx, 0, sizeof (ctx)); 10640 ctx.region_type = ORT_NONE; 10641 gimplify_omp_ctxp = &ctx; 10642 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body); 10643 gimplify_omp_ctxp = NULL; 10644 } 10645 } 10646 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE; 10647 10648 if (OMP_FOR_INIT (for_stmt) == NULL_TREE) 10649 for_stmt = inner_for_stmt; 10650 10651 /* For taskloop, need to gimplify the start, end and step before the 10652 taskloop, outside of the taskloop omp context. */ 10653 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP) 10654 { 10655 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++) 10656 { 10657 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i); 10658 if (!is_gimple_constant (TREE_OPERAND (t, 1))) 10659 { 10660 tree type = TREE_TYPE (TREE_OPERAND (t, 0)); 10661 TREE_OPERAND (t, 1) 10662 = get_initialized_tmp_var (TREE_OPERAND (t, 1), 10663 gimple_seq_empty_p (for_pre_body) 10664 ? pre_p : &for_pre_body, NULL, 10665 false); 10666 /* Reference to pointer conversion is considered useless, 10667 but is significant for firstprivate clause. Force it 10668 here. */ 10669 if (TREE_CODE (type) == POINTER_TYPE 10670 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 1))) 10671 == REFERENCE_TYPE)) 10672 { 10673 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type)); 10674 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v, 10675 TREE_OPERAND (t, 1)); 10676 gimplify_and_add (m, gimple_seq_empty_p (for_pre_body) 10677 ? pre_p : &for_pre_body); 10678 TREE_OPERAND (t, 1) = v; 10679 } 10680 tree c = build_omp_clause (input_location, 10681 OMP_CLAUSE_FIRSTPRIVATE); 10682 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1); 10683 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt); 10684 OMP_FOR_CLAUSES (orig_for_stmt) = c; 10685 } 10686 10687 /* Handle OMP_FOR_COND. */ 10688 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i); 10689 if (!is_gimple_constant (TREE_OPERAND (t, 1))) 10690 { 10691 tree type = TREE_TYPE (TREE_OPERAND (t, 0)); 10692 TREE_OPERAND (t, 1) 10693 = get_initialized_tmp_var (TREE_OPERAND (t, 1), 10694 gimple_seq_empty_p (for_pre_body) 10695 ? pre_p : &for_pre_body, NULL, 10696 false); 10697 /* Reference to pointer conversion is considered useless, 10698 but is significant for firstprivate clause. Force it 10699 here. 
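E.g. (illustrative) when the loop bound is a 'T *&' reference, a pointer temporary is initialized from it so that the firstprivate clause added below copies the pointer value rather than the reference.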
*/ 10700 if (TREE_CODE (type) == POINTER_TYPE 10701 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 1))) 10702 == REFERENCE_TYPE)) 10703 { 10704 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type)); 10705 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v, 10706 TREE_OPERAND (t, 1)); 10707 gimplify_and_add (m, gimple_seq_empty_p (for_pre_body) 10708 ? pre_p : &for_pre_body); 10709 TREE_OPERAND (t, 1) = v; 10710 } 10711 tree c = build_omp_clause (input_location, 10712 OMP_CLAUSE_FIRSTPRIVATE); 10713 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1); 10714 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt); 10715 OMP_FOR_CLAUSES (orig_for_stmt) = c; 10716 } 10717 10718 /* Handle OMP_FOR_INCR. */ 10719 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i); 10720 if (TREE_CODE (t) == MODIFY_EXPR) 10721 { 10722 decl = TREE_OPERAND (t, 0); 10723 t = TREE_OPERAND (t, 1); 10724 tree *tp = &TREE_OPERAND (t, 1); 10725 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl) 10726 tp = &TREE_OPERAND (t, 0); 10727 10728 if (!is_gimple_constant (*tp)) 10729 { 10730 gimple_seq *seq = gimple_seq_empty_p (for_pre_body) 10731 ? pre_p : &for_pre_body; 10732 *tp = get_initialized_tmp_var (*tp, seq, NULL, false); 10733 tree c = build_omp_clause (input_location, 10734 OMP_CLAUSE_FIRSTPRIVATE); 10735 OMP_CLAUSE_DECL (c) = *tp; 10736 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt); 10737 OMP_FOR_CLAUSES (orig_for_stmt) = c; 10738 } 10739 } 10740 } 10741 10742 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort, 10743 OMP_TASKLOOP); 10744 } 10745 10746 if (orig_for_stmt != for_stmt) 10747 gimplify_omp_ctxp->combined_loop = true; 10748 10749 for_body = NULL; 10750 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) 10751 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt))); 10752 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) 10753 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt))); 10754 10755 tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED); 10756 bool is_doacross = false; 10757 if (c && OMP_CLAUSE_ORDERED_EXPR (c)) 10758 { 10759 is_doacross = true; 10760 gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH 10761 (OMP_FOR_INIT (for_stmt)) 10762 * 2); 10763 } 10764 int collapse = 1, tile = 0; 10765 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE); 10766 if (c) 10767 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c)); 10768 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE); 10769 if (c) 10770 tile = list_length (OMP_CLAUSE_TILE_LIST (c)); 10771 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++) 10772 { 10773 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i); 10774 gcc_assert (TREE_CODE (t) == MODIFY_EXPR); 10775 decl = TREE_OPERAND (t, 0); 10776 gcc_assert (DECL_P (decl)); 10777 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl)) 10778 || POINTER_TYPE_P (TREE_TYPE (decl))); 10779 if (is_doacross) 10780 { 10781 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt)) 10782 { 10783 tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i); 10784 if (TREE_CODE (orig_decl) == TREE_LIST) 10785 { 10786 orig_decl = TREE_PURPOSE (orig_decl); 10787 if (!orig_decl) 10788 orig_decl = decl; 10789 } 10790 gimplify_omp_ctxp->loop_iter_var.quick_push (orig_decl); 10791 } 10792 else 10793 gimplify_omp_ctxp->loop_iter_var.quick_push (decl); 10794 gimplify_omp_ctxp->loop_iter_var.quick_push (decl); 10795 } 10796 10797 /* Make sure the iteration variable is private. 
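On simd with a single loop this is done via an explicit linear clause with no copy-in (and no copy-out when the IV was declared in the loop header); otherwise the IV becomes lastprivate, or plain private when it was declared inside the loop.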
*/ 10798 tree c = NULL_TREE; 10799 tree c2 = NULL_TREE; 10800 if (orig_for_stmt != for_stmt) 10801 { 10802 /* Preserve this information until we gimplify the inner simd. */ 10803 if (has_decl_expr 10804 && bitmap_bit_p (has_decl_expr, DECL_UID (decl))) 10805 TREE_PRIVATE (t) = 1; 10806 } 10807 else if (ort == ORT_SIMD) 10808 { 10809 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables, 10810 (splay_tree_key) decl); 10811 omp_is_private (gimplify_omp_ctxp, decl, 10812 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) 10813 != 1)); 10814 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0) 10815 omp_notice_variable (gimplify_omp_ctxp, decl, true); 10816 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1) 10817 { 10818 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR); 10819 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1; 10820 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN; 10821 if ((has_decl_expr 10822 && bitmap_bit_p (has_decl_expr, DECL_UID (decl))) 10823 || TREE_PRIVATE (t)) 10824 { 10825 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1; 10826 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER; 10827 } 10828 struct gimplify_omp_ctx *outer 10829 = gimplify_omp_ctxp->outer_context; 10830 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)) 10831 { 10832 if (outer->region_type == ORT_WORKSHARE 10833 && outer->combined_loop) 10834 { 10835 n = splay_tree_lookup (outer->variables, 10836 (splay_tree_key)decl); 10837 if (n != NULL && (n->value & GOVD_LOCAL) != 0) 10838 { 10839 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1; 10840 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER; 10841 } 10842 else 10843 { 10844 struct gimplify_omp_ctx *octx = outer->outer_context; 10845 if (octx 10846 && octx->region_type == ORT_COMBINED_PARALLEL 10847 && octx->outer_context 10848 && (octx->outer_context->region_type 10849 == ORT_WORKSHARE) 10850 && octx->outer_context->combined_loop) 10851 { 10852 octx = octx->outer_context; 10853 n = splay_tree_lookup (octx->variables, 10854 (splay_tree_key)decl); 10855 if (n != NULL && (n->value & GOVD_LOCAL) != 0) 10856 { 10857 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1; 10858 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER; 10859 } 10860 } 10861 } 10862 } 10863 } 10864 10865 OMP_CLAUSE_DECL (c) = decl; 10866 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt); 10867 OMP_FOR_CLAUSES (for_stmt) = c; 10868 omp_add_variable (gimplify_omp_ctxp, decl, flags); 10869 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)) 10870 { 10871 if (outer->region_type == ORT_WORKSHARE 10872 && outer->combined_loop) 10873 { 10874 if (outer->outer_context 10875 && (outer->outer_context->region_type 10876 == ORT_COMBINED_PARALLEL)) 10877 outer = outer->outer_context; 10878 else if (omp_check_private (outer, decl, false)) 10879 outer = NULL; 10880 } 10881 else if (((outer->region_type & ORT_TASKLOOP) 10882 == ORT_TASKLOOP) 10883 && outer->combined_loop 10884 && !omp_check_private (gimplify_omp_ctxp, 10885 decl, false)) 10886 ; 10887 else if (outer->region_type != ORT_COMBINED_PARALLEL) 10888 { 10889 omp_notice_variable (outer, decl, true); 10890 outer = NULL; 10891 } 10892 if (outer) 10893 { 10894 n = splay_tree_lookup (outer->variables, 10895 (splay_tree_key)decl); 10896 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0) 10897 { 10898 omp_add_variable (outer, decl, 10899 GOVD_LASTPRIVATE | GOVD_SEEN); 10900 if (outer->region_type == ORT_COMBINED_PARALLEL 10901 && outer->outer_context 10902 && (outer->outer_context->region_type 10903 == ORT_WORKSHARE) 10904 && outer->outer_context->combined_loop) 10905 { 10906 outer = 
outer->outer_context; 10907 n = splay_tree_lookup (outer->variables, 10908 (splay_tree_key)decl); 10909 if (omp_check_private (outer, decl, false)) 10910 outer = NULL; 10911 else if (n == NULL 10912 || ((n->value & GOVD_DATA_SHARE_CLASS) 10913 == 0)) 10914 omp_add_variable (outer, decl, 10915 GOVD_LASTPRIVATE 10916 | GOVD_SEEN); 10917 else 10918 outer = NULL; 10919 } 10920 if (outer && outer->outer_context 10921 && ((outer->outer_context->region_type 10922 & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS 10923 || (((outer->region_type & ORT_TASKLOOP) 10924 == ORT_TASKLOOP) 10925 && (outer->outer_context->region_type 10926 == ORT_COMBINED_PARALLEL)))) 10927 { 10928 outer = outer->outer_context; 10929 n = splay_tree_lookup (outer->variables, 10930 (splay_tree_key)decl); 10931 if (n == NULL 10932 || (n->value & GOVD_DATA_SHARE_CLASS) == 0) 10933 omp_add_variable (outer, decl, 10934 GOVD_SHARED | GOVD_SEEN); 10935 else 10936 outer = NULL; 10937 } 10938 if (outer && outer->outer_context) 10939 omp_notice_variable (outer->outer_context, decl, 10940 true); 10941 } 10942 } 10943 } 10944 } 10945 else 10946 { 10947 bool lastprivate 10948 = (!has_decl_expr 10949 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl))); 10950 if (TREE_PRIVATE (t)) 10951 lastprivate = false; 10952 struct gimplify_omp_ctx *outer 10953 = gimplify_omp_ctxp->outer_context; 10954 if (outer && lastprivate) 10955 { 10956 if (outer->region_type == ORT_WORKSHARE 10957 && outer->combined_loop) 10958 { 10959 n = splay_tree_lookup (outer->variables, 10960 (splay_tree_key)decl); 10961 if (n != NULL && (n->value & GOVD_LOCAL) != 0) 10962 { 10963 lastprivate = false; 10964 outer = NULL; 10965 } 10966 else if (outer->outer_context 10967 && (outer->outer_context->region_type 10968 == ORT_COMBINED_PARALLEL)) 10969 outer = outer->outer_context; 10970 else if (omp_check_private (outer, decl, false)) 10971 outer = NULL; 10972 } 10973 else if (((outer->region_type & ORT_TASKLOOP) 10974 == ORT_TASKLOOP) 10975 && outer->combined_loop 10976 && !omp_check_private (gimplify_omp_ctxp, 10977 decl, false)) 10978 ; 10979 else if (outer->region_type != ORT_COMBINED_PARALLEL) 10980 { 10981 omp_notice_variable (outer, decl, true); 10982 outer = NULL; 10983 } 10984 if (outer) 10985 { 10986 n = splay_tree_lookup (outer->variables, 10987 (splay_tree_key)decl); 10988 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0) 10989 { 10990 omp_add_variable (outer, decl, 10991 GOVD_LASTPRIVATE | GOVD_SEEN); 10992 if (outer->region_type == ORT_COMBINED_PARALLEL 10993 && outer->outer_context 10994 && (outer->outer_context->region_type 10995 == ORT_WORKSHARE) 10996 && outer->outer_context->combined_loop) 10997 { 10998 outer = outer->outer_context; 10999 n = splay_tree_lookup (outer->variables, 11000 (splay_tree_key)decl); 11001 if (omp_check_private (outer, decl, false)) 11002 outer = NULL; 11003 else if (n == NULL 11004 || ((n->value & GOVD_DATA_SHARE_CLASS) 11005 == 0)) 11006 omp_add_variable (outer, decl, 11007 GOVD_LASTPRIVATE 11008 | GOVD_SEEN); 11009 else 11010 outer = NULL; 11011 } 11012 if (outer && outer->outer_context 11013 && ((outer->outer_context->region_type 11014 & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS 11015 || (((outer->region_type & ORT_TASKLOOP) 11016 == ORT_TASKLOOP) 11017 && (outer->outer_context->region_type 11018 == ORT_COMBINED_PARALLEL)))) 11019 { 11020 outer = outer->outer_context; 11021 n = splay_tree_lookup (outer->variables, 11022 (splay_tree_key)decl); 11023 if (n == NULL 11024 || (n->value & GOVD_DATA_SHARE_CLASS) == 0) 11025 
omp_add_variable (outer, decl, 11026 GOVD_SHARED | GOVD_SEEN); 11027 else 11028 outer = NULL; 11029 } 11030 if (outer && outer->outer_context) 11031 omp_notice_variable (outer->outer_context, decl, 11032 true); 11033 } 11034 } 11035 } 11036 11037 c = build_omp_clause (input_location, 11038 lastprivate ? OMP_CLAUSE_LASTPRIVATE 11039 : OMP_CLAUSE_PRIVATE); 11040 OMP_CLAUSE_DECL (c) = decl; 11041 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt); 11042 OMP_FOR_CLAUSES (for_stmt) = c; 11043 omp_add_variable (gimplify_omp_ctxp, decl, 11044 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE) 11045 | GOVD_EXPLICIT | GOVD_SEEN); 11046 c = NULL_TREE; 11047 } 11048 } 11049 else if (omp_is_private (gimplify_omp_ctxp, decl, 0)) 11050 omp_notice_variable (gimplify_omp_ctxp, decl, true); 11051 else 11052 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN); 11053 11054 /* If DECL is not a gimple register, create a temporary variable to act 11055 as an iteration counter. This is valid, since DECL cannot be 11056 modified in the body of the loop. Similarly for any iteration vars 11057 in simd with collapse > 1 where the iterator vars must be 11058 lastprivate. */ 11059 if (orig_for_stmt != for_stmt) 11060 var = decl; 11061 else if (!is_gimple_reg (decl) 11062 || (ort == ORT_SIMD 11063 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)) 11064 { 11065 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; 11066 /* Make sure omp_add_variable is not called on it prematurely. 11067 We call it ourselves a few lines later. */ 11068 gimplify_omp_ctxp = NULL; 11069 var = create_tmp_var (TREE_TYPE (decl), get_name (decl)); 11070 gimplify_omp_ctxp = ctx; 11071 TREE_OPERAND (t, 0) = var; 11072 11073 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var)); 11074 11075 if (ort == ORT_SIMD 11076 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1) 11077 { 11078 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR); 11079 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1; 11080 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1; 11081 OMP_CLAUSE_DECL (c2) = var; 11082 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt); 11083 OMP_FOR_CLAUSES (for_stmt) = c2; 11084 omp_add_variable (gimplify_omp_ctxp, var, 11085 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN); 11086 if (c == NULL_TREE) 11087 { 11088 c = c2; 11089 c2 = NULL_TREE; 11090 } 11091 } 11092 else 11093 omp_add_variable (gimplify_omp_ctxp, var, 11094 GOVD_PRIVATE | GOVD_SEEN); 11095 } 11096 else 11097 var = decl; 11098 11099 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL, 11100 is_gimple_val, fb_rvalue, false); 11101 ret = MIN (ret, tret); 11102 if (ret == GS_ERROR) 11103 return ret; 11104 11105 /* Handle OMP_FOR_COND. */ 11106 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i); 11107 gcc_assert (COMPARISON_CLASS_P (t)); 11108 gcc_assert (TREE_OPERAND (t, 0) == decl); 11109 11110 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL, 11111 is_gimple_val, fb_rvalue, false); 11112 ret = MIN (ret, tret); 11113 11114 /* Handle OMP_FOR_INCR. */ 11115 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i); 11116 switch (TREE_CODE (t)) 11117 { 11118 case PREINCREMENT_EXPR: 11119 case POSTINCREMENT_EXPR: 11120 { 11121 tree decl = TREE_OPERAND (t, 0); 11122 /* c_omp_for_incr_canonicalize_ptr() should have been 11123 called to massage things appropriately. 
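Hence the assertion below: pointer-typed IVs should only reach this point in MODIFY_EXPR form, using POINTER_PLUS_EXPR for the increment.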
*/ 11124 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl))); 11125 11126 if (orig_for_stmt != for_stmt) 11127 break; 11128 t = build_int_cst (TREE_TYPE (decl), 1); 11129 if (c) 11130 OMP_CLAUSE_LINEAR_STEP (c) = t; 11131 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t); 11132 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t); 11133 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t; 11134 break; 11135 } 11136 11137 case PREDECREMENT_EXPR: 11138 case POSTDECREMENT_EXPR: 11139 /* c_omp_for_incr_canonicalize_ptr() should have been 11140 called to massage things appropriately. */ 11141 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl))); 11142 if (orig_for_stmt != for_stmt) 11143 break; 11144 t = build_int_cst (TREE_TYPE (decl), -1); 11145 if (c) 11146 OMP_CLAUSE_LINEAR_STEP (c) = t; 11147 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t); 11148 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t); 11149 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t; 11150 break; 11151 11152 case MODIFY_EXPR: 11153 gcc_assert (TREE_OPERAND (t, 0) == decl); 11154 TREE_OPERAND (t, 0) = var; 11155 11156 t = TREE_OPERAND (t, 1); 11157 switch (TREE_CODE (t)) 11158 { 11159 case PLUS_EXPR: 11160 if (TREE_OPERAND (t, 1) == decl) 11161 { 11162 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0); 11163 TREE_OPERAND (t, 0) = var; 11164 break; 11165 } 11166 11167 /* Fallthru. */ 11168 case MINUS_EXPR: 11169 case POINTER_PLUS_EXPR: 11170 gcc_assert (TREE_OPERAND (t, 0) == decl); 11171 TREE_OPERAND (t, 0) = var; 11172 break; 11173 default: 11174 gcc_unreachable (); 11175 } 11176 11177 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL, 11178 is_gimple_val, fb_rvalue, false); 11179 ret = MIN (ret, tret); 11180 if (c) 11181 { 11182 tree step = TREE_OPERAND (t, 1); 11183 tree stept = TREE_TYPE (decl); 11184 if (POINTER_TYPE_P (stept)) 11185 stept = sizetype; 11186 step = fold_convert (stept, step); 11187 if (TREE_CODE (t) == MINUS_EXPR) 11188 step = fold_build1 (NEGATE_EXPR, stept, step); 11189 OMP_CLAUSE_LINEAR_STEP (c) = step; 11190 if (step != TREE_OPERAND (t, 1)) 11191 { 11192 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), 11193 &for_pre_body, NULL, 11194 is_gimple_val, fb_rvalue, false); 11195 ret = MIN (ret, tret); 11196 } 11197 } 11198 break; 11199 11200 default: 11201 gcc_unreachable (); 11202 } 11203 11204 if (c2) 11205 { 11206 gcc_assert (c); 11207 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c); 11208 } 11209 11210 if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt) 11211 { 11212 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c)) 11213 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE 11214 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL) 11215 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR 11216 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c) 11217 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL)) 11218 && OMP_CLAUSE_DECL (c) == decl) 11219 { 11220 if (is_doacross && (collapse == 1 || i >= collapse)) 11221 t = var; 11222 else 11223 { 11224 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i); 11225 gcc_assert (TREE_CODE (t) == MODIFY_EXPR); 11226 gcc_assert (TREE_OPERAND (t, 0) == var); 11227 t = TREE_OPERAND (t, 1); 11228 gcc_assert (TREE_CODE (t) == PLUS_EXPR 11229 || TREE_CODE (t) == MINUS_EXPR 11230 || TREE_CODE (t) == POINTER_PLUS_EXPR); 11231 gcc_assert (TREE_OPERAND (t, 0) == var); 11232 t = build2 (TREE_CODE (t), TREE_TYPE (decl), 11233 is_doacross ? 
var : decl, 11234 TREE_OPERAND (t, 1)); 11235 } 11236 gimple_seq *seq; 11237 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE) 11238 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c); 11239 else 11240 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c); 11241 push_gimplify_context (); 11242 gimplify_assign (decl, t, seq); 11243 gimple *bind = NULL; 11244 if (gimplify_ctxp->temps) 11245 { 11246 bind = gimple_build_bind (NULL_TREE, *seq, NULL_TREE); 11247 *seq = NULL; 11248 gimplify_seq_add_stmt (seq, bind); 11249 } 11250 pop_gimplify_context (bind); 11251 } 11252 } 11253 } 11254 11255 BITMAP_FREE (has_decl_expr); 11256 11257 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP) 11258 { 11259 push_gimplify_context (); 11260 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR) 11261 { 11262 OMP_FOR_BODY (orig_for_stmt) 11263 = build3 (BIND_EXPR, void_type_node, NULL, 11264 OMP_FOR_BODY (orig_for_stmt), NULL); 11265 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1; 11266 } 11267 } 11268 11269 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt), 11270 &for_body); 11271 11272 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP) 11273 { 11274 if (gimple_code (g) == GIMPLE_BIND) 11275 pop_gimplify_context (g); 11276 else 11277 pop_gimplify_context (NULL); 11278 } 11279 11280 if (orig_for_stmt != for_stmt) 11281 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++) 11282 { 11283 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i); 11284 decl = TREE_OPERAND (t, 0); 11285 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; 11286 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP) 11287 gimplify_omp_ctxp = ctx->outer_context; 11288 var = create_tmp_var (TREE_TYPE (decl), get_name (decl)); 11289 gimplify_omp_ctxp = ctx; 11290 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN); 11291 TREE_OPERAND (t, 0) = var; 11292 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i); 11293 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1)); 11294 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var; 11295 } 11296 11297 gimplify_adjust_omp_clauses (pre_p, for_body, 11298 &OMP_FOR_CLAUSES (orig_for_stmt), 11299 TREE_CODE (orig_for_stmt)); 11300 11301 int kind; 11302 switch (TREE_CODE (orig_for_stmt)) 11303 { 11304 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break; 11305 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break; 11306 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break; 11307 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break; 11308 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break; 11309 default: 11310 gcc_unreachable (); 11311 } 11312 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt), 11313 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)), 11314 for_pre_body); 11315 if (orig_for_stmt != for_stmt) 11316 gimple_omp_for_set_combined_p (gfor, true); 11317 if (gimplify_omp_ctxp 11318 && (gimplify_omp_ctxp->combined_loop 11319 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL 11320 && gimplify_omp_ctxp->outer_context 11321 && gimplify_omp_ctxp->outer_context->combined_loop))) 11322 { 11323 gimple_omp_for_set_combined_into_p (gfor, true); 11324 if (gimplify_omp_ctxp->combined_loop) 11325 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD); 11326 else 11327 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR); 11328 } 11329 11330 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++) 11331 { 11332 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i); 11333 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0)); 11334 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 
1)); 11335 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i); 11336 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t)); 11337 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1)); 11338 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i); 11339 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1)); 11340 } 11341 11342 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop 11343 constructs with GIMPLE_OMP_TASK sandwiched in between them. 11344 The outer taskloop stands for computing the number of iterations, 11345 counts for collapsed loops and holding taskloop specific clauses. 11346 The task construct stands for the effect of data sharing on the 11347 explicit task it creates and the inner taskloop stands for expansion 11348 of the static loop inside of the explicit task construct. */ 11349 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP) 11350 { 11351 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor); 11352 tree task_clauses = NULL_TREE; 11353 tree c = *gfor_clauses_ptr; 11354 tree *gtask_clauses_ptr = &task_clauses; 11355 tree outer_for_clauses = NULL_TREE; 11356 tree *gforo_clauses_ptr = &outer_for_clauses; 11357 for (; c; c = OMP_CLAUSE_CHAIN (c)) 11358 switch (OMP_CLAUSE_CODE (c)) 11359 { 11360 /* These clauses are allowed on task, move them there. */ 11361 case OMP_CLAUSE_SHARED: 11362 case OMP_CLAUSE_FIRSTPRIVATE: 11363 case OMP_CLAUSE_DEFAULT: 11364 case OMP_CLAUSE_IF: 11365 case OMP_CLAUSE_UNTIED: 11366 case OMP_CLAUSE_FINAL: 11367 case OMP_CLAUSE_MERGEABLE: 11368 case OMP_CLAUSE_PRIORITY: 11369 case OMP_CLAUSE_REDUCTION: 11370 case OMP_CLAUSE_IN_REDUCTION: 11371 *gtask_clauses_ptr = c; 11372 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c); 11373 break; 11374 case OMP_CLAUSE_PRIVATE: 11375 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c)) 11376 { 11377 /* We want private on outer for and firstprivate 11378 on task. */ 11379 *gtask_clauses_ptr 11380 = build_omp_clause (OMP_CLAUSE_LOCATION (c), 11381 OMP_CLAUSE_FIRSTPRIVATE); 11382 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c); 11383 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL); 11384 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr); 11385 *gforo_clauses_ptr = c; 11386 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c); 11387 } 11388 else 11389 { 11390 *gtask_clauses_ptr = c; 11391 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c); 11392 } 11393 break; 11394 /* These clauses go into outer taskloop clauses. */ 11395 case OMP_CLAUSE_GRAINSIZE: 11396 case OMP_CLAUSE_NUM_TASKS: 11397 case OMP_CLAUSE_NOGROUP: 11398 *gforo_clauses_ptr = c; 11399 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c); 11400 break; 11401 /* Taskloop clause we duplicate on both taskloops. */ 11402 case OMP_CLAUSE_COLLAPSE: 11403 *gfor_clauses_ptr = c; 11404 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c); 11405 *gforo_clauses_ptr = copy_node (c); 11406 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr); 11407 break; 11408 /* For lastprivate, keep the clause on inner taskloop, and add 11409 a shared clause on task. If the same decl is also firstprivate, 11410 add also firstprivate clause on the inner taskloop. 
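As an illustrative sketch, for
  #pragma omp taskloop lastprivate (x)
the inner taskloop keeps lastprivate (x) while the generated task gets
shared (x), so the last iteration's value is written back through the
task's shared X.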
*/ 11411 case OMP_CLAUSE_LASTPRIVATE: 11412 if (OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c)) 11413 { 11414 /* For taskloop C++ lastprivate IVs, we want: 11415 1) private on outer taskloop 11416 2) firstprivate and shared on task 11417 3) lastprivate on inner taskloop */ 11418 *gtask_clauses_ptr 11419 = build_omp_clause (OMP_CLAUSE_LOCATION (c), 11420 OMP_CLAUSE_FIRSTPRIVATE); 11421 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c); 11422 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL); 11423 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr); 11424 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1; 11425 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c), 11426 OMP_CLAUSE_PRIVATE); 11427 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c); 11428 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1; 11429 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c); 11430 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr); 11431 } 11432 *gfor_clauses_ptr = c; 11433 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c); 11434 *gtask_clauses_ptr 11435 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED); 11436 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c); 11437 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)) 11438 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1; 11439 gtask_clauses_ptr 11440 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr); 11441 break; 11442 default: 11443 gcc_unreachable (); 11444 } 11445 *gfor_clauses_ptr = NULL_TREE; 11446 *gtask_clauses_ptr = NULL_TREE; 11447 *gforo_clauses_ptr = NULL_TREE; 11448 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE); 11449 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE, 11450 NULL_TREE, NULL_TREE, NULL_TREE); 11451 gimple_omp_task_set_taskloop_p (g, true); 11452 g = gimple_build_bind (NULL_TREE, g, NULL_TREE); 11453 gomp_for *gforo 11454 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses, 11455 gimple_omp_for_collapse (gfor), 11456 gimple_omp_for_pre_body (gfor)); 11457 gimple_omp_for_set_pre_body (gfor, NULL); 11458 gimple_omp_for_set_combined_p (gforo, true); 11459 gimple_omp_for_set_combined_into_p (gfor, true); 11460 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++) 11461 { 11462 tree type = TREE_TYPE (gimple_omp_for_index (gfor, i)); 11463 tree v = create_tmp_var (type); 11464 gimple_omp_for_set_index (gforo, i, v); 11465 t = unshare_expr (gimple_omp_for_initial (gfor, i)); 11466 gimple_omp_for_set_initial (gforo, i, t); 11467 gimple_omp_for_set_cond (gforo, i, 11468 gimple_omp_for_cond (gfor, i)); 11469 t = unshare_expr (gimple_omp_for_final (gfor, i)); 11470 gimple_omp_for_set_final (gforo, i, t); 11471 t = unshare_expr (gimple_omp_for_incr (gfor, i)); 11472 gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i)); 11473 TREE_OPERAND (t, 0) = v; 11474 gimple_omp_for_set_incr (gforo, i, t); 11475 t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE); 11476 OMP_CLAUSE_DECL (t) = v; 11477 OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo); 11478 gimple_omp_for_set_clauses (gforo, t); 11479 } 11480 gimplify_seq_add_stmt (pre_p, gforo); 11481 } 11482 else 11483 gimplify_seq_add_stmt (pre_p, gfor); 11484 if (ret != GS_ALL_DONE) 11485 return GS_ERROR; 11486 *expr_p = NULL_TREE; 11487 return GS_ALL_DONE; 11488 } 11489 11490 /* Helper function of optimize_target_teams, find OMP_TEAMS inside 11491 of OMP_TARGET's body. 
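The walk looks through BIND_EXPRs and STATEMENT_LISTs only, so (as a
sketch) it finds the teams construct in the directly nested
  #pragma omp target
  #pragma omp teams
case, but deliberately gives up on a teams construct buried under any
other statement.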
*/ 11492 11493 static tree 11494 find_omp_teams (tree *tp, int *walk_subtrees, void *) 11495 { 11496 *walk_subtrees = 0; 11497 switch (TREE_CODE (*tp)) 11498 { 11499 case OMP_TEAMS: 11500 return *tp; 11501 case BIND_EXPR: 11502 case STATEMENT_LIST: 11503 *walk_subtrees = 1; 11504 break; 11505 default: 11506 break; 11507 } 11508 return NULL_TREE; 11509 } 11510 11511 /* Helper function of optimize_target_teams, determine if the expression 11512 can be computed safely before the target construct on the host. */ 11513 11514 static tree 11515 computable_teams_clause (tree *tp, int *walk_subtrees, void *) 11516 { 11517 splay_tree_node n; 11518 11519 if (TYPE_P (*tp)) 11520 { 11521 *walk_subtrees = 0; 11522 return NULL_TREE; 11523 } 11524 switch (TREE_CODE (*tp)) 11525 { 11526 case VAR_DECL: 11527 case PARM_DECL: 11528 case RESULT_DECL: 11529 *walk_subtrees = 0; 11530 if (error_operand_p (*tp) 11531 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp)) 11532 || DECL_HAS_VALUE_EXPR_P (*tp) 11533 || DECL_THREAD_LOCAL_P (*tp) 11534 || TREE_SIDE_EFFECTS (*tp) 11535 || TREE_THIS_VOLATILE (*tp)) 11536 return *tp; 11537 if (is_global_var (*tp) 11538 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp)) 11539 || lookup_attribute ("omp declare target link", 11540 DECL_ATTRIBUTES (*tp)))) 11541 return *tp; 11542 if (VAR_P (*tp) 11543 && !DECL_SEEN_IN_BIND_EXPR_P (*tp) 11544 && !is_global_var (*tp) 11545 && decl_function_context (*tp) == current_function_decl) 11546 return *tp; 11547 n = splay_tree_lookup (gimplify_omp_ctxp->variables, 11548 (splay_tree_key) *tp); 11549 if (n == NULL) 11550 { 11551 if (gimplify_omp_ctxp->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE) 11552 return NULL_TREE; 11553 return *tp; 11554 } 11555 else if (n->value & GOVD_LOCAL) 11556 return *tp; 11557 else if (n->value & GOVD_FIRSTPRIVATE) 11558 return NULL_TREE; 11559 else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO)) 11560 == (GOVD_MAP | GOVD_MAP_ALWAYS_TO)) 11561 return NULL_TREE; 11562 return *tp; 11563 case INTEGER_CST: 11564 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp))) 11565 return *tp; 11566 return NULL_TREE; 11567 case TARGET_EXPR: 11568 if (TARGET_EXPR_INITIAL (*tp) 11569 || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL) 11570 return *tp; 11571 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp), 11572 walk_subtrees, NULL); 11573 /* Allow some reasonable subset of integral arithmetics. */ 11574 case PLUS_EXPR: 11575 case MINUS_EXPR: 11576 case MULT_EXPR: 11577 case TRUNC_DIV_EXPR: 11578 case CEIL_DIV_EXPR: 11579 case FLOOR_DIV_EXPR: 11580 case ROUND_DIV_EXPR: 11581 case TRUNC_MOD_EXPR: 11582 case CEIL_MOD_EXPR: 11583 case FLOOR_MOD_EXPR: 11584 case ROUND_MOD_EXPR: 11585 case RDIV_EXPR: 11586 case EXACT_DIV_EXPR: 11587 case MIN_EXPR: 11588 case MAX_EXPR: 11589 case LSHIFT_EXPR: 11590 case RSHIFT_EXPR: 11591 case BIT_IOR_EXPR: 11592 case BIT_XOR_EXPR: 11593 case BIT_AND_EXPR: 11594 case NEGATE_EXPR: 11595 case ABS_EXPR: 11596 case BIT_NOT_EXPR: 11597 case NON_LVALUE_EXPR: 11598 CASE_CONVERT: 11599 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp))) 11600 return *tp; 11601 return NULL_TREE; 11602 /* And disallow anything else, except for comparisons. */ 11603 default: 11604 if (COMPARISON_CLASS_P (*tp)) 11605 return NULL_TREE; 11606 return *tp; 11607 } 11608 } 11609 11610 /* Try to determine if the num_teams and/or thread_limit expressions 11611 can have their values determined already before entering the 11612 target construct. 
11613 INTEGER_CSTs trivially are,
11614 integral decls that are firstprivate (explicitly or implicitly)
11615 or explicitly map(always, to:) or map(always, tofrom:) on the target
11616 region too, and expressions involving simple arithmetics on those
11617 too; function calls are not OK, nor is dereferencing anything, etc.
11618 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
11619 EXPR based on what we find:
11620 0 stands for clause not specified at all, use implementation default
11621 -1 stands for value that can't be determined easily before entering
11622 the target construct.
11623 If the teams construct is not present at all, use 1 for num_teams
11624 and 0 for thread_limit (only one team is involved, and the thread
11625 limit is implementation defined). */
11626
11627 static void
11628 optimize_target_teams (tree target, gimple_seq *pre_p)
11629 {
11630 tree body = OMP_BODY (target);
11631 tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
11632 tree num_teams = integer_zero_node;
11633 tree thread_limit = integer_zero_node;
11634 location_t num_teams_loc = EXPR_LOCATION (target);
11635 location_t thread_limit_loc = EXPR_LOCATION (target);
11636 tree c, *p, expr;
11637 struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;
11638
11639 if (teams == NULL_TREE)
11640 num_teams = integer_one_node;
11641 else
11642 for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
11643 {
11644 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
11645 {
11646 p = &num_teams;
11647 num_teams_loc = OMP_CLAUSE_LOCATION (c);
11648 }
11649 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
11650 {
11651 p = &thread_limit;
11652 thread_limit_loc = OMP_CLAUSE_LOCATION (c);
11653 }
11654 else
11655 continue;
11656 expr = OMP_CLAUSE_OPERAND (c, 0);
11657 if (TREE_CODE (expr) == INTEGER_CST)
11658 {
11659 *p = expr;
11660 continue;
11661 }
11662 if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
11663 {
11664 *p = integer_minus_one_node;
11665 continue;
11666 }
11667 *p = expr;
11668 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
11669 if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
11670 == GS_ERROR)
11671 {
11672 gimplify_omp_ctxp = target_ctx;
11673 *p = integer_minus_one_node;
11674 continue;
11675 }
11676 gimplify_omp_ctxp = target_ctx;
11677 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
11678 OMP_CLAUSE_OPERAND (c, 0) = *p;
11679 }
11680 c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
11681 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
11682 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
11683 OMP_TARGET_CLAUSES (target) = c;
11684 c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
11685 OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
11686 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
11687 OMP_TARGET_CLAUSES (target) = c;
11688 }
11689
11690 /* Gimplify the gross structure of several OMP constructs. */
11691
11692 static void
11693 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
11694 {
11695 tree expr = *expr_p;
11696 gimple *stmt;
11697 gimple_seq body = NULL;
11698 enum omp_region_type ort;
11699
11700 switch (TREE_CODE (expr))
11701 {
11702 case OMP_SECTIONS:
11703 case OMP_SINGLE:
11704 ort = ORT_WORKSHARE;
11705 break;
11706 case OMP_TARGET:
11707 ort = OMP_TARGET_COMBINED (expr) ?
ORT_COMBINED_TARGET : ORT_TARGET; 11708 break; 11709 case OACC_KERNELS: 11710 ort = ORT_ACC_KERNELS; 11711 break; 11712 case OACC_PARALLEL: 11713 ort = ORT_ACC_PARALLEL; 11714 break; 11715 case OACC_DATA: 11716 ort = ORT_ACC_DATA; 11717 break; 11718 case OMP_TARGET_DATA: 11719 ort = ORT_TARGET_DATA; 11720 break; 11721 case OMP_TEAMS: 11722 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS; 11723 if (gimplify_omp_ctxp == NULL 11724 || (gimplify_omp_ctxp->region_type == ORT_TARGET 11725 && gimplify_omp_ctxp->outer_context == NULL 11726 && lookup_attribute ("omp declare target", 11727 DECL_ATTRIBUTES (current_function_decl)))) 11728 ort = (enum omp_region_type) (ort | ORT_HOST_TEAMS); 11729 break; 11730 case OACC_HOST_DATA: 11731 ort = ORT_ACC_HOST_DATA; 11732 break; 11733 default: 11734 gcc_unreachable (); 11735 } 11736 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort, 11737 TREE_CODE (expr)); 11738 if (TREE_CODE (expr) == OMP_TARGET) 11739 optimize_target_teams (expr, pre_p); 11740 if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0 11741 || (ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS) 11742 { 11743 push_gimplify_context (); 11744 gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body); 11745 if (gimple_code (g) == GIMPLE_BIND) 11746 pop_gimplify_context (g); 11747 else 11748 pop_gimplify_context (NULL); 11749 if ((ort & ORT_TARGET_DATA) != 0) 11750 { 11751 enum built_in_function end_ix; 11752 switch (TREE_CODE (expr)) 11753 { 11754 case OACC_DATA: 11755 case OACC_HOST_DATA: 11756 end_ix = BUILT_IN_GOACC_DATA_END; 11757 break; 11758 case OMP_TARGET_DATA: 11759 end_ix = BUILT_IN_GOMP_TARGET_END_DATA; 11760 break; 11761 default: 11762 gcc_unreachable (); 11763 } 11764 tree fn = builtin_decl_explicit (end_ix); 11765 g = gimple_build_call (fn, 0); 11766 gimple_seq cleanup = NULL; 11767 gimple_seq_add_stmt (&cleanup, g); 11768 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY); 11769 body = NULL; 11770 gimple_seq_add_stmt (&body, g); 11771 } 11772 } 11773 else 11774 gimplify_and_add (OMP_BODY (expr), &body); 11775 gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr), 11776 TREE_CODE (expr)); 11777 11778 switch (TREE_CODE (expr)) 11779 { 11780 case OACC_DATA: 11781 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA, 11782 OMP_CLAUSES (expr)); 11783 break; 11784 case OACC_KERNELS: 11785 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS, 11786 OMP_CLAUSES (expr)); 11787 break; 11788 case OACC_HOST_DATA: 11789 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA, 11790 OMP_CLAUSES (expr)); 11791 break; 11792 case OACC_PARALLEL: 11793 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL, 11794 OMP_CLAUSES (expr)); 11795 break; 11796 case OMP_SECTIONS: 11797 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr)); 11798 break; 11799 case OMP_SINGLE: 11800 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr)); 11801 break; 11802 case OMP_TARGET: 11803 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION, 11804 OMP_CLAUSES (expr)); 11805 break; 11806 case OMP_TARGET_DATA: 11807 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA, 11808 OMP_CLAUSES (expr)); 11809 break; 11810 case OMP_TEAMS: 11811 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr)); 11812 if ((ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS) 11813 gimple_omp_teams_set_host (as_a <gomp_teams *> (stmt), true); 11814 break; 11815 default: 11816 gcc_unreachable (); 11817 } 11818 11819 
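/* Queue the statement built above; the GENERIC node has now been fully
   consumed. */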
gimplify_seq_add_stmt (pre_p, stmt); 11820 *expr_p = NULL_TREE; 11821 } 11822 11823 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP 11824 target update constructs. */ 11825 11826 static void 11827 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p) 11828 { 11829 tree expr = *expr_p; 11830 int kind; 11831 gomp_target *stmt; 11832 enum omp_region_type ort = ORT_WORKSHARE; 11833 11834 switch (TREE_CODE (expr)) 11835 { 11836 case OACC_ENTER_DATA: 11837 case OACC_EXIT_DATA: 11838 kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA; 11839 ort = ORT_ACC; 11840 break; 11841 case OACC_UPDATE: 11842 kind = GF_OMP_TARGET_KIND_OACC_UPDATE; 11843 ort = ORT_ACC; 11844 break; 11845 case OMP_TARGET_UPDATE: 11846 kind = GF_OMP_TARGET_KIND_UPDATE; 11847 break; 11848 case OMP_TARGET_ENTER_DATA: 11849 kind = GF_OMP_TARGET_KIND_ENTER_DATA; 11850 break; 11851 case OMP_TARGET_EXIT_DATA: 11852 kind = GF_OMP_TARGET_KIND_EXIT_DATA; 11853 break; 11854 default: 11855 gcc_unreachable (); 11856 } 11857 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p, 11858 ort, TREE_CODE (expr)); 11859 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr), 11860 TREE_CODE (expr)); 11861 if (TREE_CODE (expr) == OACC_UPDATE 11862 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr), 11863 OMP_CLAUSE_IF_PRESENT)) 11864 { 11865 /* The runtime uses GOMP_MAP_{TO,FROM} to denote the if_present 11866 clause. */ 11867 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c)) 11868 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP) 11869 switch (OMP_CLAUSE_MAP_KIND (c)) 11870 { 11871 case GOMP_MAP_FORCE_TO: 11872 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO); 11873 break; 11874 case GOMP_MAP_FORCE_FROM: 11875 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FROM); 11876 break; 11877 default: 11878 break; 11879 } 11880 } 11881 else if (TREE_CODE (expr) == OACC_EXIT_DATA 11882 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr), 11883 OMP_CLAUSE_FINALIZE)) 11884 { 11885 /* Use GOMP_MAP_DELETE/GOMP_MAP_FORCE_FROM to denote that "finalize" 11886 semantics apply to all mappings of this OpenACC directive. */ 11887 bool finalize_marked = false; 11888 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c)) 11889 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP) 11890 switch (OMP_CLAUSE_MAP_KIND (c)) 11891 { 11892 case GOMP_MAP_FROM: 11893 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_FROM); 11894 finalize_marked = true; 11895 break; 11896 case GOMP_MAP_RELEASE: 11897 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_DELETE); 11898 finalize_marked = true; 11899 break; 11900 default: 11901 /* Check consistency: libgomp relies on the very first data 11902 mapping clause being marked, so make sure we did that before 11903 any other mapping clauses. */ 11904 gcc_assert (finalize_marked); 11905 break; 11906 } 11907 } 11908 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr)); 11909 11910 gimplify_seq_add_stmt (pre_p, stmt); 11911 *expr_p = NULL_TREE; 11912 } 11913 11914 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have 11915 stabilized the lhs of the atomic operation as *ADDR. Return true if 11916 EXPR is this stabilized form. */ 11917 11918 static bool 11919 goa_lhs_expr_p (tree expr, tree addr) 11920 { 11921 /* Also include casts to other type variants. The C front end is fond 11922 of adding these for e.g. volatile variables. This is like 11923 STRIP_TYPE_NOPS but includes the main variant lookup. 
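E.g. (illustrative) when the lhs is a volatile int, the front end may
have wrapped both the expression and the stabilized address in casts
between the 'int' and 'volatile int' variants; such matching
conversions are stripped in lockstep below before the trees are
compared.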
*/ 11924 STRIP_USELESS_TYPE_CONVERSION (expr); 11925 11926 if (TREE_CODE (expr) == INDIRECT_REF) 11927 { 11928 expr = TREE_OPERAND (expr, 0); 11929 while (expr != addr 11930 && (CONVERT_EXPR_P (expr) 11931 || TREE_CODE (expr) == NON_LVALUE_EXPR) 11932 && TREE_CODE (expr) == TREE_CODE (addr) 11933 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr))) 11934 { 11935 expr = TREE_OPERAND (expr, 0); 11936 addr = TREE_OPERAND (addr, 0); 11937 } 11938 if (expr == addr) 11939 return true; 11940 return (TREE_CODE (addr) == ADDR_EXPR 11941 && TREE_CODE (expr) == ADDR_EXPR 11942 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0)); 11943 } 11944 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0)) 11945 return true; 11946 return false; 11947 } 11948 11949 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an 11950 expression does not involve the lhs, evaluate it into a temporary. 11951 Return 1 if the lhs appeared as a subexpression, 0 if it did not, 11952 or -1 if an error was encountered. */ 11953 11954 static int 11955 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr, 11956 tree lhs_var) 11957 { 11958 tree expr = *expr_p; 11959 int saw_lhs; 11960 11961 if (goa_lhs_expr_p (expr, lhs_addr)) 11962 { 11963 *expr_p = lhs_var; 11964 return 1; 11965 } 11966 if (is_gimple_val (expr)) 11967 return 0; 11968 11969 saw_lhs = 0; 11970 switch (TREE_CODE_CLASS (TREE_CODE (expr))) 11971 { 11972 case tcc_binary: 11973 case tcc_comparison: 11974 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr, 11975 lhs_var); 11976 /* FALLTHRU */ 11977 case tcc_unary: 11978 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr, 11979 lhs_var); 11980 break; 11981 case tcc_expression: 11982 switch (TREE_CODE (expr)) 11983 { 11984 case TRUTH_ANDIF_EXPR: 11985 case TRUTH_ORIF_EXPR: 11986 case TRUTH_AND_EXPR: 11987 case TRUTH_OR_EXPR: 11988 case TRUTH_XOR_EXPR: 11989 case BIT_INSERT_EXPR: 11990 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, 11991 lhs_addr, lhs_var); 11992 /* FALLTHRU */ 11993 case TRUTH_NOT_EXPR: 11994 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, 11995 lhs_addr, lhs_var); 11996 break; 11997 case COMPOUND_EXPR: 11998 /* Break out any preevaluations from cp_build_modify_expr. */ 11999 for (; TREE_CODE (expr) == COMPOUND_EXPR; 12000 expr = TREE_OPERAND (expr, 1)) 12001 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p); 12002 *expr_p = expr; 12003 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var); 12004 default: 12005 break; 12006 } 12007 break; 12008 case tcc_reference: 12009 if (TREE_CODE (expr) == BIT_FIELD_REF) 12010 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, 12011 lhs_addr, lhs_var); 12012 break; 12013 default: 12014 break; 12015 } 12016 12017 if (saw_lhs == 0) 12018 { 12019 enum gimplify_status gs; 12020 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue); 12021 if (gs != GS_ALL_DONE) 12022 saw_lhs = -1; 12023 } 12024 12025 return saw_lhs; 12026 } 12027 12028 /* Gimplify an OMP_ATOMIC statement. */ 12029 12030 static enum gimplify_status 12031 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p) 12032 { 12033 tree addr = TREE_OPERAND (*expr_p, 0); 12034 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ 12035 ? 
NULL : TREE_OPERAND (*expr_p, 1); 12036 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr))); 12037 tree tmp_load; 12038 gomp_atomic_load *loadstmt; 12039 gomp_atomic_store *storestmt; 12040 12041 tmp_load = create_tmp_reg (type); 12042 if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0) 12043 return GS_ERROR; 12044 12045 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue) 12046 != GS_ALL_DONE) 12047 return GS_ERROR; 12048 12049 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr, 12050 OMP_ATOMIC_MEMORY_ORDER (*expr_p)); 12051 gimplify_seq_add_stmt (pre_p, loadstmt); 12052 if (rhs) 12053 { 12054 /* BIT_INSERT_EXPR is not valid for non-integral bitfield 12055 representatives. Use BIT_FIELD_REF on the lhs instead. */ 12056 if (TREE_CODE (rhs) == BIT_INSERT_EXPR 12057 && !INTEGRAL_TYPE_P (TREE_TYPE (tmp_load))) 12058 { 12059 tree bitpos = TREE_OPERAND (rhs, 2); 12060 tree op1 = TREE_OPERAND (rhs, 1); 12061 tree bitsize; 12062 tree tmp_store = tmp_load; 12063 if (TREE_CODE (*expr_p) == OMP_ATOMIC_CAPTURE_OLD) 12064 tmp_store = get_initialized_tmp_var (tmp_load, pre_p, NULL); 12065 if (INTEGRAL_TYPE_P (TREE_TYPE (op1))) 12066 bitsize = bitsize_int (TYPE_PRECISION (TREE_TYPE (op1))); 12067 else 12068 bitsize = TYPE_SIZE (TREE_TYPE (op1)); 12069 gcc_assert (TREE_OPERAND (rhs, 0) == tmp_load); 12070 tree t = build2_loc (EXPR_LOCATION (rhs), 12071 MODIFY_EXPR, void_type_node, 12072 build3_loc (EXPR_LOCATION (rhs), BIT_FIELD_REF, 12073 TREE_TYPE (op1), tmp_store, bitsize, 12074 bitpos), op1); 12075 gimplify_and_add (t, pre_p); 12076 rhs = tmp_store; 12077 } 12078 if (gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue) 12079 != GS_ALL_DONE) 12080 return GS_ERROR; 12081 } 12082 12083 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ) 12084 rhs = tmp_load; 12085 storestmt 12086 = gimple_build_omp_atomic_store (rhs, OMP_ATOMIC_MEMORY_ORDER (*expr_p)); 12087 gimplify_seq_add_stmt (pre_p, storestmt); 12088 switch (TREE_CODE (*expr_p)) 12089 { 12090 case OMP_ATOMIC_READ: 12091 case OMP_ATOMIC_CAPTURE_OLD: 12092 *expr_p = tmp_load; 12093 gimple_omp_atomic_set_need_value (loadstmt); 12094 break; 12095 case OMP_ATOMIC_CAPTURE_NEW: 12096 *expr_p = rhs; 12097 gimple_omp_atomic_set_need_value (storestmt); 12098 break; 12099 default: 12100 *expr_p = NULL; 12101 break; 12102 } 12103 12104 return GS_ALL_DONE; 12105 } 12106 12107 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the 12108 body, and adding some EH bits. */ 12109 12110 static enum gimplify_status 12111 gimplify_transaction (tree *expr_p, gimple_seq *pre_p) 12112 { 12113 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr); 12114 gimple *body_stmt; 12115 gtransaction *trans_stmt; 12116 gimple_seq body = NULL; 12117 int subcode = 0; 12118 12119 /* Wrap the transaction body in a BIND_EXPR so we have a context 12120 where to put decls for OMP. 
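Conceptually (a sketch), '__transaction_atomic { body }' is handled as
if the user had written '__transaction_atomic { { body } }', the inner
braces being the BIND_EXPR added here.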
*/ 12121 if (TREE_CODE (tbody) != BIND_EXPR) 12122 { 12123 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL); 12124 TREE_SIDE_EFFECTS (bind) = 1; 12125 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody)); 12126 TRANSACTION_EXPR_BODY (expr) = bind; 12127 } 12128 12129 push_gimplify_context (); 12130 temp = voidify_wrapper_expr (*expr_p, NULL); 12131 12132 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body); 12133 pop_gimplify_context (body_stmt); 12134 12135 trans_stmt = gimple_build_transaction (body); 12136 if (TRANSACTION_EXPR_OUTER (expr)) 12137 subcode = GTMA_IS_OUTER; 12138 else if (TRANSACTION_EXPR_RELAXED (expr)) 12139 subcode = GTMA_IS_RELAXED; 12140 gimple_transaction_set_subcode (trans_stmt, subcode); 12141 12142 gimplify_seq_add_stmt (pre_p, trans_stmt); 12143 12144 if (temp) 12145 { 12146 *expr_p = temp; 12147 return GS_OK; 12148 } 12149 12150 *expr_p = NULL_TREE; 12151 return GS_ALL_DONE; 12152 } 12153 12154 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY 12155 is the OMP_BODY of the original EXPR (which has already been 12156 gimplified so it's not present in the EXPR). 12157 12158 Return the gimplified GIMPLE_OMP_ORDERED tuple. */ 12159 12160 static gimple * 12161 gimplify_omp_ordered (tree expr, gimple_seq body) 12162 { 12163 tree c, decls; 12164 int failures = 0; 12165 unsigned int i; 12166 tree source_c = NULL_TREE; 12167 tree sink_c = NULL_TREE; 12168 12169 if (gimplify_omp_ctxp) 12170 { 12171 for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c)) 12172 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND 12173 && gimplify_omp_ctxp->loop_iter_var.is_empty () 12174 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK 12175 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)) 12176 { 12177 error_at (OMP_CLAUSE_LOCATION (c), 12178 "%<ordered%> construct with %<depend%> clause must be " 12179 "closely nested inside a loop with %<ordered%> clause " 12180 "with a parameter"); 12181 failures++; 12182 } 12183 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND 12184 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK) 12185 { 12186 bool fail = false; 12187 for (decls = OMP_CLAUSE_DECL (c), i = 0; 12188 decls && TREE_CODE (decls) == TREE_LIST; 12189 decls = TREE_CHAIN (decls), ++i) 12190 if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2) 12191 continue; 12192 else if (TREE_VALUE (decls) 12193 != gimplify_omp_ctxp->loop_iter_var[2 * i]) 12194 { 12195 error_at (OMP_CLAUSE_LOCATION (c), 12196 "variable %qE is not an iteration " 12197 "of outermost loop %d, expected %qE", 12198 TREE_VALUE (decls), i + 1, 12199 gimplify_omp_ctxp->loop_iter_var[2 * i]); 12200 fail = true; 12201 failures++; 12202 } 12203 else 12204 TREE_VALUE (decls) 12205 = gimplify_omp_ctxp->loop_iter_var[2 * i + 1]; 12206 if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2) 12207 { 12208 error_at (OMP_CLAUSE_LOCATION (c), 12209 "number of variables in %<depend%> clause with " 12210 "%<sink%> modifier does not match number of " 12211 "iteration variables"); 12212 failures++; 12213 } 12214 sink_c = c; 12215 } 12216 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND 12217 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE) 12218 { 12219 if (source_c) 12220 { 12221 error_at (OMP_CLAUSE_LOCATION (c), 12222 "more than one %<depend%> clause with %<source%> " 12223 "modifier on an %<ordered%> construct"); 12224 failures++; 12225 } 12226 else 12227 source_c = c; 12228 } 12229 } 12230 if (source_c && sink_c) 12231 { 12232 error_at 
(OMP_CLAUSE_LOCATION (source_c), 12233 "%<depend%> clause with %<source%> modifier specified " 12234 "together with %<depend%> clauses with %<sink%> modifier " 12235 "on the same construct"); 12236 failures++; 12237 } 12238 12239 if (failures) 12240 return gimple_build_nop (); 12241 return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr)); 12242 } 12243 12244 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the 12245 expression produces a value to be used as an operand inside a GIMPLE 12246 statement, the value will be stored back in *EXPR_P. This value will 12247 be a tree of class tcc_declaration, tcc_constant, tcc_reference or 12248 an SSA_NAME. The corresponding sequence of GIMPLE statements is 12249 emitted in PRE_P and POST_P. 12250 12251 Additionally, this process may overwrite parts of the input 12252 expression during gimplification. Ideally, it should be 12253 possible to do non-destructive gimplification. 12254 12255 EXPR_P points to the GENERIC expression to convert to GIMPLE. If 12256 the expression needs to evaluate to a value to be used as 12257 an operand in a GIMPLE statement, this value will be stored in 12258 *EXPR_P on exit. This happens when the caller specifies one 12259 of fb_lvalue or fb_rvalue fallback flags. 12260 12261 PRE_P will contain the sequence of GIMPLE statements corresponding 12262 to the evaluation of EXPR and all the side-effects that must 12263 be executed before the main expression. On exit, the last 12264 statement of PRE_P is the core statement being gimplified. For 12265 instance, when gimplifying 'if (++a)' the last statement in 12266 PRE_P will be 'if (t.1)' where t.1 is the result of 12267 pre-incrementing 'a'. 12268 12269 POST_P will contain the sequence of GIMPLE statements corresponding 12270 to the evaluation of all the side-effects that must be executed 12271 after the main expression. If this is NULL, the post 12272 side-effects are stored at the end of PRE_P. 12273 12274 The reason why the output is split in two is to handle post 12275 side-effects explicitly. In some cases, an expression may have 12276 inner and outer post side-effects which need to be emitted in 12277 an order different from the one given by the recursive 12278 traversal. For instance, for the expression (*p--)++ the post 12279 side-effects of '--' must actually occur *after* the post 12280 side-effects of '++'. However, gimplification will first visit 12281 the inner expression, so if a separate POST sequence was not 12282 used, the resulting sequence would be: 12283 12284 1 t.1 = *p 12285 2 p = p - 1 12286 3 t.2 = t.1 + 1 12287 4 *p = t.2 12288 12289 However, the post-decrement operation in line #2 must not be 12290 evaluated until after the store to *p at line #4, so the 12291 correct sequence should be: 12292 12293 1 t.1 = *p 12294 2 t.2 = t.1 + 1 12295 3 *p = t.2 12296 4 p = p - 1 12297 12298 So, by specifying a separate post queue, it is possible 12299 to emit the post side-effects in the correct order. 12300 If POST_P is NULL, an internal queue will be used. Before 12301 returning to the caller, the sequence POST_P is appended to 12302 the main output sequence PRE_P. 12303 12304 GIMPLE_TEST_F points to a function that takes a tree T and 12305 returns nonzero if T is in the GIMPLE form requested by the 12306 caller. The GIMPLE predicates are in gimple.c. 12307 12308 FALLBACK tells the function what sort of a temporary we want if 12309 gimplification cannot produce an expression that complies with 12310 GIMPLE_TEST_F. 
12311 12312 fb_none means that no temporary should be generated 12313 fb_rvalue means that an rvalue is OK to generate 12314 fb_lvalue means that an lvalue is OK to generate 12315 fb_either means that either is OK, but an lvalue is preferable. 12316 fb_mayfail means that gimplification may fail (in which case 12317 GS_ERROR will be returned) 12318 12319 The return value is either GS_ERROR or GS_ALL_DONE, since this 12320 function iterates until EXPR is completely gimplified or an error 12321 occurs. */ 12322 12323 enum gimplify_status 12324 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p, 12325 bool (*gimple_test_f) (tree), fallback_t fallback) 12326 { 12327 tree tmp; 12328 gimple_seq internal_pre = NULL; 12329 gimple_seq internal_post = NULL; 12330 tree save_expr; 12331 bool is_statement; 12332 location_t saved_location; 12333 enum gimplify_status ret; 12334 gimple_stmt_iterator pre_last_gsi, post_last_gsi; 12335 tree label; 12336 12337 save_expr = *expr_p; 12338 if (save_expr == NULL_TREE) 12339 return GS_ALL_DONE; 12340 12341 /* If we are gimplifying a top-level statement, PRE_P must be valid. */ 12342 is_statement = gimple_test_f == is_gimple_stmt; 12343 if (is_statement) 12344 gcc_assert (pre_p); 12345 12346 /* Consistency checks. */ 12347 if (gimple_test_f == is_gimple_reg) 12348 gcc_assert (fallback & (fb_rvalue | fb_lvalue)); 12349 else if (gimple_test_f == is_gimple_val 12350 || gimple_test_f == is_gimple_call_addr 12351 || gimple_test_f == is_gimple_condexpr 12352 || gimple_test_f == is_gimple_mem_rhs 12353 || gimple_test_f == is_gimple_mem_rhs_or_call 12354 || gimple_test_f == is_gimple_reg_rhs 12355 || gimple_test_f == is_gimple_reg_rhs_or_call 12356 || gimple_test_f == is_gimple_asm_val 12357 || gimple_test_f == is_gimple_mem_ref_addr) 12358 gcc_assert (fallback & fb_rvalue); 12359 else if (gimple_test_f == is_gimple_min_lval 12360 || gimple_test_f == is_gimple_lvalue) 12361 gcc_assert (fallback & fb_lvalue); 12362 else if (gimple_test_f == is_gimple_addressable) 12363 gcc_assert (fallback & fb_either); 12364 else if (gimple_test_f == is_gimple_stmt) 12365 gcc_assert (fallback == fb_none); 12366 else 12367 { 12368 /* We should have recognized the GIMPLE_TEST_F predicate to 12369 know what kind of fallback to use in case a temporary is 12370 needed to hold the value or address of *EXPR_P. */ 12371 gcc_unreachable (); 12372 } 12373 12374 /* We used to check the predicate here and return immediately if it 12375 succeeds. This is wrong; the design is for gimplification to be 12376 idempotent, and for the predicates to only test for valid forms, not 12377 whether they are fully simplified. */ 12378 if (pre_p == NULL) 12379 pre_p = &internal_pre; 12380 12381 if (post_p == NULL) 12382 post_p = &internal_post; 12383 12384 /* Remember the last statements added to PRE_P and POST_P. Every 12385 new statement added by the gimplification helpers needs to be 12386 annotated with location information. To centralize the 12387 responsibility, we remember the last statement that had been 12388 added to both queues before gimplifying *EXPR_P. If 12389 gimplification produces new statements in PRE_P and POST_P, those 12390 statements will be annotated with the same location information 12391 as *EXPR_P. 
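For example, if 'a = b () + c ()' appears on source line 42, the
separate call statements and temporary assignments emitted for it into
PRE_P all inherit line 42's location instead of having none.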
*/ 12392 pre_last_gsi = gsi_last (*pre_p); 12393 post_last_gsi = gsi_last (*post_p); 12394 12395 saved_location = input_location; 12396 if (save_expr != error_mark_node 12397 && EXPR_HAS_LOCATION (*expr_p)) 12398 input_location = EXPR_LOCATION (*expr_p); 12399 12400 /* Loop over the specific gimplifiers until the toplevel node 12401 remains the same. */ 12402 do 12403 { 12404 /* Strip away as many useless type conversions as possible 12405 at the toplevel. */ 12406 STRIP_USELESS_TYPE_CONVERSION (*expr_p); 12407 12408 /* Remember the expr. */ 12409 save_expr = *expr_p; 12410 12411 /* Die, die, die, my darling. */ 12412 if (error_operand_p (save_expr)) 12413 { 12414 ret = GS_ERROR; 12415 break; 12416 } 12417 12418 /* Do any language-specific gimplification. */ 12419 ret = ((enum gimplify_status) 12420 lang_hooks.gimplify_expr (expr_p, pre_p, post_p)); 12421 if (ret == GS_OK) 12422 { 12423 if (*expr_p == NULL_TREE) 12424 break; 12425 if (*expr_p != save_expr) 12426 continue; 12427 } 12428 else if (ret != GS_UNHANDLED) 12429 break; 12430 12431 /* Make sure that all the cases set 'ret' appropriately. */ 12432 ret = GS_UNHANDLED; 12433 switch (TREE_CODE (*expr_p)) 12434 { 12435 /* First deal with the special cases. */ 12436 12437 case POSTINCREMENT_EXPR: 12438 case POSTDECREMENT_EXPR: 12439 case PREINCREMENT_EXPR: 12440 case PREDECREMENT_EXPR: 12441 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p, 12442 fallback != fb_none, 12443 TREE_TYPE (*expr_p)); 12444 break; 12445 12446 case VIEW_CONVERT_EXPR: 12447 if ((fallback & fb_rvalue) 12448 && is_gimple_reg_type (TREE_TYPE (*expr_p)) 12449 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0)))) 12450 { 12451 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, 12452 post_p, is_gimple_val, fb_rvalue); 12453 recalculate_side_effects (*expr_p); 12454 break; 12455 } 12456 /* Fallthru. */ 12457 12458 case ARRAY_REF: 12459 case ARRAY_RANGE_REF: 12460 case REALPART_EXPR: 12461 case IMAGPART_EXPR: 12462 case COMPONENT_REF: 12463 ret = gimplify_compound_lval (expr_p, pre_p, post_p, 12464 fallback ? fallback : fb_rvalue); 12465 break; 12466 12467 case COND_EXPR: 12468 ret = gimplify_cond_expr (expr_p, pre_p, fallback); 12469 12470 /* C99 code may assign to an array in a structure value of a 12471 conditional expression, and this has undefined behavior 12472 only on execution, so create a temporary if an lvalue is 12473 required. */ 12474 if (fallback == fb_lvalue) 12475 { 12476 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false); 12477 mark_addressable (*expr_p); 12478 ret = GS_OK; 12479 } 12480 break; 12481 12482 case CALL_EXPR: 12483 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none); 12484 12485 /* C99 code may assign to an array in a structure returned 12486 from a function, and this has undefined behavior only on 12487 execution, so create a temporary if an lvalue is 12488 required. 
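A sketch of such code, with hypothetical declarations: given
  struct S { int a[4]; } f (void);
the statement 'f ().a[0] = 1;' must be accepted by the compiler;
gimplifying the call result into an addressable temporary keeps the IL
well formed even though executing the store is undefined.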
*/ 12489 if (fallback == fb_lvalue) 12490 { 12491 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false); 12492 mark_addressable (*expr_p); 12493 ret = GS_OK; 12494 } 12495 break; 12496 12497 case TREE_LIST: 12498 gcc_unreachable (); 12499 12500 case COMPOUND_EXPR: 12501 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none); 12502 break; 12503 12504 case COMPOUND_LITERAL_EXPR: 12505 ret = gimplify_compound_literal_expr (expr_p, pre_p, 12506 gimple_test_f, fallback); 12507 break; 12508 12509 case MODIFY_EXPR: 12510 case INIT_EXPR: 12511 ret = gimplify_modify_expr (expr_p, pre_p, post_p, 12512 fallback != fb_none); 12513 break; 12514 12515 case TRUTH_ANDIF_EXPR: 12516 case TRUTH_ORIF_EXPR: 12517 { 12518 /* Preserve the original type of the expression and the 12519 source location of the outer expression. */ 12520 tree org_type = TREE_TYPE (*expr_p); 12521 *expr_p = gimple_boolify (*expr_p); 12522 *expr_p = build3_loc (input_location, COND_EXPR, 12523 org_type, *expr_p, 12524 fold_convert_loc 12525 (input_location, 12526 org_type, boolean_true_node), 12527 fold_convert_loc 12528 (input_location, 12529 org_type, boolean_false_node)); 12530 ret = GS_OK; 12531 break; 12532 } 12533 12534 case TRUTH_NOT_EXPR: 12535 { 12536 tree type = TREE_TYPE (*expr_p); 12537 /* The parsers are careful to generate TRUTH_NOT_EXPR 12538 only with operands that are always zero or one. 12539 We do not fold here but handle the only interesting case 12540 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */ 12541 *expr_p = gimple_boolify (*expr_p); 12542 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1) 12543 *expr_p = build1_loc (input_location, BIT_NOT_EXPR, 12544 TREE_TYPE (*expr_p), 12545 TREE_OPERAND (*expr_p, 0)); 12546 else 12547 *expr_p = build2_loc (input_location, BIT_XOR_EXPR, 12548 TREE_TYPE (*expr_p), 12549 TREE_OPERAND (*expr_p, 0), 12550 build_int_cst (TREE_TYPE (*expr_p), 1)); 12551 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p))) 12552 *expr_p = fold_convert_loc (input_location, type, *expr_p); 12553 ret = GS_OK; 12554 break; 12555 } 12556 12557 case ADDR_EXPR: 12558 ret = gimplify_addr_expr (expr_p, pre_p, post_p); 12559 break; 12560 12561 case ANNOTATE_EXPR: 12562 { 12563 tree cond = TREE_OPERAND (*expr_p, 0); 12564 tree kind = TREE_OPERAND (*expr_p, 1); 12565 tree data = TREE_OPERAND (*expr_p, 2); 12566 tree type = TREE_TYPE (cond); 12567 if (!INTEGRAL_TYPE_P (type)) 12568 { 12569 *expr_p = cond; 12570 ret = GS_OK; 12571 break; 12572 } 12573 tree tmp = create_tmp_var (type); 12574 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p)); 12575 gcall *call 12576 = gimple_build_call_internal (IFN_ANNOTATE, 3, cond, kind, data); 12577 gimple_call_set_lhs (call, tmp); 12578 gimplify_seq_add_stmt (pre_p, call); 12579 *expr_p = tmp; 12580 ret = GS_ALL_DONE; 12581 break; 12582 } 12583 12584 case VA_ARG_EXPR: 12585 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p); 12586 break; 12587 12588 CASE_CONVERT: 12589 if (IS_EMPTY_STMT (*expr_p)) 12590 { 12591 ret = GS_ALL_DONE; 12592 break; 12593 } 12594 12595 if (VOID_TYPE_P (TREE_TYPE (*expr_p)) 12596 || fallback == fb_none) 12597 { 12598 /* Just strip a conversion to void (or in void context) and 12599 try again. */ 12600 *expr_p = TREE_OPERAND (*expr_p, 0); 12601 ret = GS_OK; 12602 break; 12603 } 12604 12605 ret = gimplify_conversion (expr_p); 12606 if (ret == GS_ERROR) 12607 break; 12608 if (*expr_p != save_expr) 12609 break; 12610 /* FALLTHRU */ 12611 12612 case FIX_TRUNC_EXPR: 12613 /* unary_expr: ... 
| '(' cast ')' val | ... */
12614 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12615 is_gimple_val, fb_rvalue);
12616 recalculate_side_effects (*expr_p);
12617 break;
12618
12619 case INDIRECT_REF:
12620 {
12621 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
12622 bool notrap = TREE_THIS_NOTRAP (*expr_p);
12623 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
12624
12625 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
12626 if (*expr_p != save_expr)
12627 {
12628 ret = GS_OK;
12629 break;
12630 }
12631
12632 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12633 is_gimple_reg, fb_rvalue);
12634 if (ret == GS_ERROR)
12635 break;
12636
12637 recalculate_side_effects (*expr_p);
12638 *expr_p = fold_build2_loc (input_location, MEM_REF,
12639 TREE_TYPE (*expr_p),
12640 TREE_OPERAND (*expr_p, 0),
12641 build_int_cst (saved_ptr_type, 0));
12642 TREE_THIS_VOLATILE (*expr_p) = volatilep;
12643 TREE_THIS_NOTRAP (*expr_p) = notrap;
12644 ret = GS_OK;
12645 break;
12646 }
12647
12648 /* We arrive here through the various re-gimplification paths. */
12649 case MEM_REF:
12650 /* First try re-folding the whole thing. */
12651 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
12652 TREE_OPERAND (*expr_p, 0),
12653 TREE_OPERAND (*expr_p, 1));
12654 if (tmp)
12655 {
12656 REF_REVERSE_STORAGE_ORDER (tmp)
12657 = REF_REVERSE_STORAGE_ORDER (*expr_p);
12658 *expr_p = tmp;
12659 recalculate_side_effects (*expr_p);
12660 ret = GS_OK;
12661 break;
12662 }
12663 /* Avoid re-gimplifying the address operand if it is already
12664 in suitable form. Re-gimplifying would mark the address
12665 operand addressable. Always gimplify when not in SSA form
12666 as we still may have to gimplify decls with value-exprs. */
12667 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
12668 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
12669 {
12670 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12671 is_gimple_mem_ref_addr, fb_rvalue);
12672 if (ret == GS_ERROR)
12673 break;
12674 }
12675 recalculate_side_effects (*expr_p);
12676 ret = GS_ALL_DONE;
12677 break;
12678
12679 /* Constants need not be gimplified. */
12680 case INTEGER_CST:
12681 case REAL_CST:
12682 case FIXED_CST:
12683 case STRING_CST:
12684 case COMPLEX_CST:
12685 case VECTOR_CST:
12686 /* Drop the overflow flag on constants; we do not want
12687 that in the GIMPLE IL. */
12688 if (TREE_OVERFLOW_P (*expr_p))
12689 *expr_p = drop_tree_overflow (*expr_p);
12690 ret = GS_ALL_DONE;
12691 break;
12692
12693 case CONST_DECL:
12694 /* If we require an lvalue, such as for ADDR_EXPR, retain the
12695 CONST_DECL node. Otherwise the decl is replaceable by its
12696 value. */
12697 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
12698 if (fallback & fb_lvalue)
12699 ret = GS_ALL_DONE;
12700 else
12701 {
12702 *expr_p = DECL_INITIAL (*expr_p);
12703 ret = GS_OK;
12704 }
12705 break;
12706
12707 case DECL_EXPR:
12708 ret = gimplify_decl_expr (expr_p, pre_p);
12709 break;
12710
12711 case BIND_EXPR:
12712 ret = gimplify_bind_expr (expr_p, pre_p);
12713 break;
12714
12715 case LOOP_EXPR:
12716 ret = gimplify_loop_expr (expr_p, pre_p);
12717 break;
12718
12719 case SWITCH_EXPR:
12720 ret = gimplify_switch_expr (expr_p, pre_p);
12721 break;
12722
12723 case EXIT_EXPR:
12724 ret = gimplify_exit_expr (expr_p);
12725 break;
12726
12727 case GOTO_EXPR:
12728 /* If the target is not a LABEL_DECL, then it is a computed jump
12729 and the target needs to be gimplified.
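E.g. (GNU C sketch) in 'goto *dispatch[i];' the destination
'dispatch[i]' must first be reduced to a GIMPLE value, whereas a plain
'goto lab;' already carries a LABEL_DECL.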
*/
12730 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
12731 {
12732 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
12733 NULL, is_gimple_val, fb_rvalue);
12734 if (ret == GS_ERROR)
12735 break;
12736 }
12737 gimplify_seq_add_stmt (pre_p,
12738 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
12739 ret = GS_ALL_DONE;
12740 break;
12741
12742 case PREDICT_EXPR:
12743 gimplify_seq_add_stmt (pre_p,
12744 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
12745 PREDICT_EXPR_OUTCOME (*expr_p)));
12746 ret = GS_ALL_DONE;
12747 break;
12748
12749 case LABEL_EXPR:
12750 ret = gimplify_label_expr (expr_p, pre_p);
12751 label = LABEL_EXPR_LABEL (*expr_p);
12752 gcc_assert (decl_function_context (label) == current_function_decl);
12753
12754 /* If the label is used in a goto statement, or the address of the
12755 label is taken, we need to unpoison all variables that were seen
12756 so far. Doing so prevents us from reporting false positives. */
12757 if (asan_poisoned_variables
12758 && asan_used_labels != NULL
12759 && asan_used_labels->contains (label))
12760 asan_poison_variables (asan_poisoned_variables, false, pre_p);
12761 break;
12762
12763 case CASE_LABEL_EXPR:
12764 ret = gimplify_case_label_expr (expr_p, pre_p);
12765
12766 if (gimplify_ctxp->live_switch_vars)
12767 asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
12768 pre_p);
12769 break;
12770
12771 case RETURN_EXPR:
12772 ret = gimplify_return_expr (*expr_p, pre_p);
12773 break;
12774
12775 case CONSTRUCTOR:
12776 /* Don't reduce this in place; let gimplify_init_constructor work its
12777 magic. But if we're just elaborating this for side effects, just
12778 gimplify any element that has side-effects. */
12779 if (fallback == fb_none)
12780 {
12781 unsigned HOST_WIDE_INT ix;
12782 tree val;
12783 tree temp = NULL_TREE;
12784 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
12785 if (TREE_SIDE_EFFECTS (val))
12786 append_to_statement_list (val, &temp);
12787
12788 *expr_p = temp;
12789 ret = temp ? GS_OK : GS_ALL_DONE;
12790 }
12791 /* C99 code may assign to an array in a constructed
12792 structure or union, and this has undefined behavior only
12793 on execution, so create a temporary if an lvalue is
12794 required. */
12795 else if (fallback == fb_lvalue)
12796 {
12797 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
12798 mark_addressable (*expr_p);
12799 ret = GS_OK;
12800 }
12801 else
12802 ret = GS_ALL_DONE;
12803 break;
12804
12805 /* The following are special cases that are not handled by the
12806 original GIMPLE grammar. */
12807
12808 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
12809 eliminated. */
12810 case SAVE_EXPR:
12811 ret = gimplify_save_expr (expr_p, pre_p, post_p);
12812 break;
12813
12814 case BIT_FIELD_REF:
12815 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12816 post_p, is_gimple_lvalue, fb_either);
12817 recalculate_side_effects (*expr_p);
12818 break;
12819
12820 case TARGET_MEM_REF:
12821 {
12822 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
12823
12824 if (TMR_BASE (*expr_p))
12825 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
12826 post_p, is_gimple_mem_ref_addr, fb_either);
12827 if (TMR_INDEX (*expr_p))
12828 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
12829 post_p, is_gimple_val, fb_rvalue);
12830 if (TMR_INDEX2 (*expr_p))
12831 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
12832 post_p, is_gimple_val, fb_rvalue);
12833 /* TMR_STEP and TMR_OFFSET are always integer constants.
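(In the TARGET_MEM_REF addressing form, roughly
BASE + INDEX * STEP + INDEX2 + OFFSET, only the three operands
gimplified above can be non-constant.)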
*/
12834 ret = MIN (r0, r1);
12835 }
12836 break;
12837
12838 case NON_LVALUE_EXPR:
12839 /* This should have been stripped above. */
12840 gcc_unreachable ();
12841
12842 case ASM_EXPR:
12843 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
12844 break;
12845
12846 case TRY_FINALLY_EXPR:
12847 case TRY_CATCH_EXPR:
12848 {
12849 gimple_seq eval, cleanup;
12850 gtry *try_;
12851
12852 /* Calls to destructors are generated automatically in the
12853 FINALLY/CATCH block; their location should be UNKNOWN_LOCATION.
12854 However, gimplify_call_expr resets such call stmts to
12855 input_location if it finds the stmt's location is unknown. To
12856 prevent that for destructors, we set input_location to
12857 UNKNOWN_LOCATION here. Note that this only affects the destructor
12858 calls in the FINALLY/CATCH block; input_location is automatically
12859 restored to its original value by the end of gimplify_expr. */
12860 input_location = UNKNOWN_LOCATION;
12861 eval = cleanup = NULL;
12862 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
12863 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
12864 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
12865 if (gimple_seq_empty_p (cleanup))
12866 {
12867 gimple_seq_add_seq (pre_p, eval);
12868 ret = GS_ALL_DONE;
12869 break;
12870 }
12871 try_ = gimple_build_try (eval, cleanup,
12872 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
12873 ? GIMPLE_TRY_FINALLY
12874 : GIMPLE_TRY_CATCH);
12875 if (EXPR_HAS_LOCATION (save_expr))
12876 gimple_set_location (try_, EXPR_LOCATION (save_expr));
12877 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
12878 gimple_set_location (try_, saved_location);
12879 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
12880 gimple_try_set_catch_is_cleanup (try_,
12881 TRY_CATCH_IS_CLEANUP (*expr_p));
12882 gimplify_seq_add_stmt (pre_p, try_);
12883 ret = GS_ALL_DONE;
12884 break;
12885 }
12886
12887 case CLEANUP_POINT_EXPR:
12888 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
12889 break;
12890
12891 case TARGET_EXPR:
12892 ret = gimplify_target_expr (expr_p, pre_p, post_p);
12893 break;
12894
12895 case CATCH_EXPR:
12896 {
12897 gimple *c;
12898 gimple_seq handler = NULL;
12899 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
12900 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
12901 gimplify_seq_add_stmt (pre_p, c);
12902 ret = GS_ALL_DONE;
12903 break;
12904 }
12905
12906 case EH_FILTER_EXPR:
12907 {
12908 gimple *ehf;
12909 gimple_seq failure = NULL;
12910
12911 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
12912 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
12913 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
12914 gimplify_seq_add_stmt (pre_p, ehf);
12915 ret = GS_ALL_DONE;
12916 break;
12917 }
12918
12919 case OBJ_TYPE_REF:
12920 {
12921 enum gimplify_status r0, r1;
12922 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
12923 post_p, is_gimple_val, fb_rvalue);
12924 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
12925 post_p, is_gimple_val, fb_rvalue);
12926 TREE_SIDE_EFFECTS (*expr_p) = 0;
12927 ret = MIN (r0, r1);
12928 }
12929 break;
12930
12931 case LABEL_DECL:
12932 /* We get here when taking the address of a label. We mark
12933 the label as "forced", meaning it can never be removed and
12934 it is a potential target for any computed goto.
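This happens for the GNU C labels-as-values extension, e.g.
(illustrative)
  void *p = &&lab;
after which a computed jump such as "goto *p;" may reach LAB from
anywhere in the function.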
*/ 12935 FORCED_LABEL (*expr_p) = 1; 12936 ret = GS_ALL_DONE; 12937 break; 12938 12939 case STATEMENT_LIST: 12940 ret = gimplify_statement_list (expr_p, pre_p); 12941 break; 12942 12943 case WITH_SIZE_EXPR: 12944 { 12945 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, 12946 post_p == &internal_post ? NULL : post_p, 12947 gimple_test_f, fallback); 12948 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p, 12949 is_gimple_val, fb_rvalue); 12950 ret = GS_ALL_DONE; 12951 } 12952 break; 12953 12954 case VAR_DECL: 12955 case PARM_DECL: 12956 ret = gimplify_var_or_parm_decl (expr_p); 12957 break; 12958 12959 case RESULT_DECL: 12960 /* When within an OMP context, notice uses of variables. */ 12961 if (gimplify_omp_ctxp) 12962 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true); 12963 ret = GS_ALL_DONE; 12964 break; 12965 12966 case DEBUG_EXPR_DECL: 12967 gcc_unreachable (); 12968 12969 case DEBUG_BEGIN_STMT: 12970 gimplify_seq_add_stmt (pre_p, 12971 gimple_build_debug_begin_stmt 12972 (TREE_BLOCK (*expr_p), 12973 EXPR_LOCATION (*expr_p))); 12974 ret = GS_ALL_DONE; 12975 *expr_p = NULL; 12976 break; 12977 12978 case SSA_NAME: 12979 /* Allow callbacks into the gimplifier during optimization. */ 12980 ret = GS_ALL_DONE; 12981 break; 12982 12983 case OMP_PARALLEL: 12984 gimplify_omp_parallel (expr_p, pre_p); 12985 ret = GS_ALL_DONE; 12986 break; 12987 12988 case OMP_TASK: 12989 gimplify_omp_task (expr_p, pre_p); 12990 ret = GS_ALL_DONE; 12991 break; 12992 12993 case OMP_FOR: 12994 case OMP_SIMD: 12995 case OMP_DISTRIBUTE: 12996 case OMP_TASKLOOP: 12997 case OACC_LOOP: 12998 ret = gimplify_omp_for (expr_p, pre_p); 12999 break; 13000 13001 case OACC_CACHE: 13002 gimplify_oacc_cache (expr_p, pre_p); 13003 ret = GS_ALL_DONE; 13004 break; 13005 13006 case OACC_DECLARE: 13007 gimplify_oacc_declare (expr_p, pre_p); 13008 ret = GS_ALL_DONE; 13009 break; 13010 13011 case OACC_HOST_DATA: 13012 case OACC_DATA: 13013 case OACC_KERNELS: 13014 case OACC_PARALLEL: 13015 case OMP_SECTIONS: 13016 case OMP_SINGLE: 13017 case OMP_TARGET: 13018 case OMP_TARGET_DATA: 13019 case OMP_TEAMS: 13020 gimplify_omp_workshare (expr_p, pre_p); 13021 ret = GS_ALL_DONE; 13022 break; 13023 13024 case OACC_ENTER_DATA: 13025 case OACC_EXIT_DATA: 13026 case OACC_UPDATE: 13027 case OMP_TARGET_UPDATE: 13028 case OMP_TARGET_ENTER_DATA: 13029 case OMP_TARGET_EXIT_DATA: 13030 gimplify_omp_target_update (expr_p, pre_p); 13031 ret = GS_ALL_DONE; 13032 break; 13033 13034 case OMP_SECTION: 13035 case OMP_MASTER: 13036 case OMP_ORDERED: 13037 case OMP_CRITICAL: 13038 { 13039 gimple_seq body = NULL; 13040 gimple *g; 13041 13042 gimplify_and_add (OMP_BODY (*expr_p), &body); 13043 switch (TREE_CODE (*expr_p)) 13044 { 13045 case OMP_SECTION: 13046 g = gimple_build_omp_section (body); 13047 break; 13048 case OMP_MASTER: 13049 g = gimple_build_omp_master (body); 13050 break; 13051 case OMP_ORDERED: 13052 g = gimplify_omp_ordered (*expr_p, body); 13053 break; 13054 case OMP_CRITICAL: 13055 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p), 13056 pre_p, ORT_WORKSHARE, OMP_CRITICAL); 13057 gimplify_adjust_omp_clauses (pre_p, body, 13058 &OMP_CRITICAL_CLAUSES (*expr_p), 13059 OMP_CRITICAL); 13060 g = gimple_build_omp_critical (body, 13061 OMP_CRITICAL_NAME (*expr_p), 13062 OMP_CRITICAL_CLAUSES (*expr_p)); 13063 break; 13064 default: 13065 gcc_unreachable (); 13066 } 13067 gimplify_seq_add_stmt (pre_p, g); 13068 ret = GS_ALL_DONE; 13069 break; 13070 } 13071 13072 case OMP_TASKGROUP: 13073 { 13074 gimple_seq body = NULL; 13075 13076 tree 
*pclauses = &OMP_TASKGROUP_CLAUSES (*expr_p); 13077 gimplify_scan_omp_clauses (pclauses, pre_p, ORT_TASKGROUP, 13078 OMP_TASKGROUP); 13079 gimplify_adjust_omp_clauses (pre_p, NULL, pclauses, OMP_TASKGROUP); 13080 gimplify_and_add (OMP_BODY (*expr_p), &body); 13081 gimple_seq cleanup = NULL; 13082 tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END); 13083 gimple *g = gimple_build_call (fn, 0); 13084 gimple_seq_add_stmt (&cleanup, g); 13085 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY); 13086 body = NULL; 13087 gimple_seq_add_stmt (&body, g); 13088 g = gimple_build_omp_taskgroup (body, *pclauses); 13089 gimplify_seq_add_stmt (pre_p, g); 13090 ret = GS_ALL_DONE; 13091 break; 13092 } 13093 13094 case OMP_ATOMIC: 13095 case OMP_ATOMIC_READ: 13096 case OMP_ATOMIC_CAPTURE_OLD: 13097 case OMP_ATOMIC_CAPTURE_NEW: 13098 ret = gimplify_omp_atomic (expr_p, pre_p); 13099 break; 13100 13101 case TRANSACTION_EXPR: 13102 ret = gimplify_transaction (expr_p, pre_p); 13103 break; 13104 13105 case TRUTH_AND_EXPR: 13106 case TRUTH_OR_EXPR: 13107 case TRUTH_XOR_EXPR: 13108 { 13109 tree orig_type = TREE_TYPE (*expr_p); 13110 tree new_type, xop0, xop1; 13111 *expr_p = gimple_boolify (*expr_p); 13112 new_type = TREE_TYPE (*expr_p); 13113 if (!useless_type_conversion_p (orig_type, new_type)) 13114 { 13115 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p); 13116 ret = GS_OK; 13117 break; 13118 } 13119 13120 /* Boolified binary truth expressions are semantically equivalent 13121 to bitwise binary expressions. Canonicalize them to the 13122 bitwise variant. */ 13123 switch (TREE_CODE (*expr_p)) 13124 { 13125 case TRUTH_AND_EXPR: 13126 TREE_SET_CODE (*expr_p, BIT_AND_EXPR); 13127 break; 13128 case TRUTH_OR_EXPR: 13129 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR); 13130 break; 13131 case TRUTH_XOR_EXPR: 13132 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR); 13133 break; 13134 default: 13135 break; 13136 } 13137 /* Now make sure that operands have compatible type to 13138 expression's new_type. */ 13139 xop0 = TREE_OPERAND (*expr_p, 0); 13140 xop1 = TREE_OPERAND (*expr_p, 1); 13141 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0))) 13142 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location, 13143 new_type, 13144 xop0); 13145 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1))) 13146 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location, 13147 new_type, 13148 xop1); 13149 /* Continue classified as tcc_binary. */ 13150 goto expr_2; 13151 } 13152 13153 case VEC_COND_EXPR: 13154 { 13155 enum gimplify_status r0, r1, r2; 13156 13157 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, 13158 post_p, is_gimple_condexpr, fb_rvalue); 13159 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, 13160 post_p, is_gimple_val, fb_rvalue); 13161 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p, 13162 post_p, is_gimple_val, fb_rvalue); 13163 13164 ret = MIN (MIN (r0, r1), r2); 13165 recalculate_side_effects (*expr_p); 13166 } 13167 break; 13168 13169 case VEC_PERM_EXPR: 13170 /* Classified as tcc_expression. */ 13171 goto expr_3; 13172 13173 case BIT_INSERT_EXPR: 13174 /* Argument 3 is a constant. 
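(the position at which to insert), so it never needs gimplification
itself; only the container and the inserted value are processed, via
the common two-operand path below.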
*/ 13175 goto expr_2; 13176 13177 case POINTER_PLUS_EXPR: 13178 { 13179 enum gimplify_status r0, r1; 13180 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, 13181 post_p, is_gimple_val, fb_rvalue); 13182 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, 13183 post_p, is_gimple_val, fb_rvalue); 13184 recalculate_side_effects (*expr_p); 13185 ret = MIN (r0, r1); 13186 break; 13187 } 13188 13189 default: 13190 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p))) 13191 { 13192 case tcc_comparison: 13193 /* Handle comparison of objects of non scalar mode aggregates 13194 with a call to memcmp. It would be nice to only have to do 13195 this for variable-sized objects, but then we'd have to allow 13196 the same nest of reference nodes we allow for MODIFY_EXPR and 13197 that's too complex. 13198 13199 Compare scalar mode aggregates as scalar mode values. Using 13200 memcmp for them would be very inefficient at best, and is 13201 plain wrong if bitfields are involved. */ 13202 { 13203 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1)); 13204 13205 /* Vector comparisons need no boolification. */ 13206 if (TREE_CODE (type) == VECTOR_TYPE) 13207 goto expr_2; 13208 else if (!AGGREGATE_TYPE_P (type)) 13209 { 13210 tree org_type = TREE_TYPE (*expr_p); 13211 *expr_p = gimple_boolify (*expr_p); 13212 if (!useless_type_conversion_p (org_type, 13213 TREE_TYPE (*expr_p))) 13214 { 13215 *expr_p = fold_convert_loc (input_location, 13216 org_type, *expr_p); 13217 ret = GS_OK; 13218 } 13219 else 13220 goto expr_2; 13221 } 13222 else if (TYPE_MODE (type) != BLKmode) 13223 ret = gimplify_scalar_mode_aggregate_compare (expr_p); 13224 else 13225 ret = gimplify_variable_sized_compare (expr_p); 13226 13227 break; 13228 } 13229 13230 /* If *EXPR_P does not need to be special-cased, handle it 13231 according to its class. */ 13232 case tcc_unary: 13233 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, 13234 post_p, is_gimple_val, fb_rvalue); 13235 break; 13236 13237 case tcc_binary: 13238 expr_2: 13239 { 13240 enum gimplify_status r0, r1; 13241 13242 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, 13243 post_p, is_gimple_val, fb_rvalue); 13244 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, 13245 post_p, is_gimple_val, fb_rvalue); 13246 13247 ret = MIN (r0, r1); 13248 break; 13249 } 13250 13251 expr_3: 13252 { 13253 enum gimplify_status r0, r1, r2; 13254 13255 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, 13256 post_p, is_gimple_val, fb_rvalue); 13257 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, 13258 post_p, is_gimple_val, fb_rvalue); 13259 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p, 13260 post_p, is_gimple_val, fb_rvalue); 13261 13262 ret = MIN (MIN (r0, r1), r2); 13263 break; 13264 } 13265 13266 case tcc_declaration: 13267 case tcc_constant: 13268 ret = GS_ALL_DONE; 13269 goto dont_recalculate; 13270 13271 default: 13272 gcc_unreachable (); 13273 } 13274 13275 recalculate_side_effects (*expr_p); 13276 13277 dont_recalculate: 13278 break; 13279 } 13280 13281 gcc_assert (*expr_p || ret != GS_OK); 13282 } 13283 while (ret == GS_OK); 13284 13285 /* If we encountered an error_mark somewhere nested inside, either 13286 stub out the statement or propagate the error back out. */ 13287 if (ret == GS_ERROR) 13288 { 13289 if (is_statement) 13290 *expr_p = NULL; 13291 goto out; 13292 } 13293 13294 /* This was only valid as a return value from the langhook, which 13295 we handled. Make sure it doesn't escape from any other context. 
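(GS_UNHANDLED can come only from the lang_hooks.gimplify_expr call in
the loop above, and every such return has already been translated into
one of the other statuses by now.)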
*/ 13296 gcc_assert (ret != GS_UNHANDLED); 13297 13298 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p)) 13299 { 13300 /* We aren't looking for a value, and we don't have a valid 13301 statement. If it doesn't have side-effects, throw it away. 13302 We can also get here with code such as "*&&L;", where L is 13303 a LABEL_DECL that is marked as FORCED_LABEL. */ 13304 if (TREE_CODE (*expr_p) == LABEL_DECL 13305 || !TREE_SIDE_EFFECTS (*expr_p)) 13306 *expr_p = NULL; 13307 else if (!TREE_THIS_VOLATILE (*expr_p)) 13308 { 13309 /* This is probably a _REF that contains something nested that 13310 has side effects. Recurse through the operands to find it. */ 13311 enum tree_code code = TREE_CODE (*expr_p); 13312 13313 switch (code) 13314 { 13315 case COMPONENT_REF: 13316 case REALPART_EXPR: 13317 case IMAGPART_EXPR: 13318 case VIEW_CONVERT_EXPR: 13319 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p, 13320 gimple_test_f, fallback); 13321 break; 13322 13323 case ARRAY_REF: 13324 case ARRAY_RANGE_REF: 13325 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p, 13326 gimple_test_f, fallback); 13327 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p, 13328 gimple_test_f, fallback); 13329 break; 13330 13331 default: 13332 /* Anything else with side-effects must be converted to 13333 a valid statement before we get here. */ 13334 gcc_unreachable (); 13335 } 13336 13337 *expr_p = NULL; 13338 } 13339 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p)) 13340 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode) 13341 { 13342 /* Historically, the compiler has treated a bare reference 13343 to a non-BLKmode volatile lvalue as forcing a load. */ 13344 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p)); 13345 13346 /* Normally, we do not want to create a temporary for a 13347 TREE_ADDRESSABLE type because such a type should not be 13348 copied by bitwise-assignment. However, we make an 13349 exception here, as all we are doing here is ensuring that 13350 we read the bytes that make up the type. We use 13351 create_tmp_var_raw because create_tmp_var will abort when 13352 given a TREE_ADDRESSABLE type. */ 13353 tree tmp = create_tmp_var_raw (type, "vol"); 13354 gimple_add_tmp_var (tmp); 13355 gimplify_assign (tmp, *expr_p, pre_p); 13356 *expr_p = NULL; 13357 } 13358 else 13359 /* We can't do anything useful with a volatile reference to 13360 an incomplete type, so just throw it away. Likewise for 13361 a BLKmode type, since any implicit inner load should 13362 already have been turned into an explicit one by the 13363 gimplification process. */ 13364 *expr_p = NULL; 13365 } 13366 13367 /* If we are gimplifying at the statement level, we're done. Tack 13368 everything together and return. */ 13369 if (fallback == fb_none || is_statement) 13370 { 13371 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear 13372 it out for GC to reclaim it. */ 13373 *expr_p = NULL_TREE; 13374 13375 if (!gimple_seq_empty_p (internal_pre) 13376 || !gimple_seq_empty_p (internal_post)) 13377 { 13378 gimplify_seq_add_seq (&internal_pre, internal_post); 13379 gimplify_seq_add_seq (pre_p, internal_pre); 13380 } 13381 13382 /* The result of gimplifying *EXPR_P is going to be the last few 13383 statements in *PRE_P and *POST_P. Add location information 13384 to all the statements that were added by the gimplification 13385 helpers. 
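(Statements built by the helpers generally carry no location of their
own, so annotating them with input_location here keeps the debug line
information consistent with the source expression.)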
*/ 13386 if (!gimple_seq_empty_p (*pre_p)) 13387 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location); 13388 13389 if (!gimple_seq_empty_p (*post_p)) 13390 annotate_all_with_location_after (*post_p, post_last_gsi, 13391 input_location); 13392 13393 goto out; 13394 } 13395 13396 #ifdef ENABLE_GIMPLE_CHECKING 13397 if (*expr_p) 13398 { 13399 enum tree_code code = TREE_CODE (*expr_p); 13400 /* These expressions should already be in gimple IR form. */ 13401 gcc_assert (code != MODIFY_EXPR 13402 && code != ASM_EXPR 13403 && code != BIND_EXPR 13404 && code != CATCH_EXPR 13405 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr) 13406 && code != EH_FILTER_EXPR 13407 && code != GOTO_EXPR 13408 && code != LABEL_EXPR 13409 && code != LOOP_EXPR 13410 && code != SWITCH_EXPR 13411 && code != TRY_FINALLY_EXPR 13412 && code != OACC_PARALLEL 13413 && code != OACC_KERNELS 13414 && code != OACC_DATA 13415 && code != OACC_HOST_DATA 13416 && code != OACC_DECLARE 13417 && code != OACC_UPDATE 13418 && code != OACC_ENTER_DATA 13419 && code != OACC_EXIT_DATA 13420 && code != OACC_CACHE 13421 && code != OMP_CRITICAL 13422 && code != OMP_FOR 13423 && code != OACC_LOOP 13424 && code != OMP_MASTER 13425 && code != OMP_TASKGROUP 13426 && code != OMP_ORDERED 13427 && code != OMP_PARALLEL 13428 && code != OMP_SECTIONS 13429 && code != OMP_SECTION 13430 && code != OMP_SINGLE); 13431 } 13432 #endif 13433 13434 /* Otherwise we're gimplifying a subexpression, so the resulting 13435 value is interesting. If it's a valid operand that matches 13436 GIMPLE_TEST_F, we're done. Unless we are handling some 13437 post-effects internally; if that's the case, we need to copy into 13438 a temporary before adding the post-effects to POST_P. */ 13439 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p)) 13440 goto out; 13441 13442 /* Otherwise, we need to create a new temporary for the gimplified 13443 expression. */ 13444 13445 /* We can't return an lvalue if we have an internal postqueue. The 13446 object the lvalue refers to would (probably) be modified by the 13447 postqueue; we need to copy the value out first, which means an 13448 rvalue. */ 13449 if ((fallback & fb_lvalue) 13450 && gimple_seq_empty_p (internal_post) 13451 && is_gimple_addressable (*expr_p)) 13452 { 13453 /* An lvalue will do. Take the address of the expression, store it 13454 in a temporary, and replace the expression with an INDIRECT_REF of 13455 that temporary. */ 13456 tree ref_alias_type = reference_alias_ptr_type (*expr_p); 13457 unsigned int ref_align = get_object_alignment (*expr_p); 13458 tree ref_type = TREE_TYPE (*expr_p); 13459 tmp = build_fold_addr_expr_loc (input_location, *expr_p); 13460 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue); 13461 if (TYPE_ALIGN (ref_type) != ref_align) 13462 ref_type = build_aligned_type (ref_type, ref_align); 13463 *expr_p = build2 (MEM_REF, ref_type, 13464 tmp, build_zero_cst (ref_alias_type)); 13465 } 13466 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p)) 13467 { 13468 /* An rvalue will do. Assign the gimplified expression into a 13469 new temporary TMP and replace the original expression with 13470 TMP. First, make sure that the expression has a type so that 13471 it can be assigned into a temporary. 
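A VOID_TYPE_P expression has no value that could be captured; such
expressions must already have been gimplified as statements, which is
what the assert below enforces.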
*/
13472 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
13473 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
13474 }
13475 else
13476 {
13477 #ifdef ENABLE_GIMPLE_CHECKING
13478 if (!(fallback & fb_mayfail))
13479 {
13480 fprintf (stderr, "gimplification failed:\n");
13481 print_generic_expr (stderr, *expr_p);
13482 debug_tree (*expr_p);
13483 internal_error ("gimplification failed");
13484 }
13485 #endif
13486 gcc_assert (fallback & fb_mayfail);
13487
13488 /* If this is an asm statement, and the user asked for the
13489 impossible, don't die. Fail and let gimplify_asm_expr
13490 issue an error. */
13491 ret = GS_ERROR;
13492 goto out;
13493 }
13494
13495 /* Make sure the temporary matches our predicate. */
13496 gcc_assert ((*gimple_test_f) (*expr_p));
13497
13498 if (!gimple_seq_empty_p (internal_post))
13499 {
13500 annotate_all_with_location (internal_post, input_location);
13501 gimplify_seq_add_seq (pre_p, internal_post);
13502 }
13503
13504 out:
13505 input_location = saved_location;
13506 return ret;
13507 }
13508
13509 /* Like gimplify_expr but make sure the gimplified result is not itself
13510 an SSA name (it is replaced by a decl if it would be). Temporaries
13511 required by evaluating *EXPR_P may still be SSA names. */
13512
13513 static enum gimplify_status
13514 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
13515 bool (*gimple_test_f) (tree), fallback_t fallback,
13516 bool allow_ssa)
13517 {
13518 bool was_ssa_name_p = TREE_CODE (*expr_p) == SSA_NAME;
13519 enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
13520 gimple_test_f, fallback);
13521 if (! allow_ssa
13522 && TREE_CODE (*expr_p) == SSA_NAME)
13523 {
13524 tree name = *expr_p;
13525 if (was_ssa_name_p)
13526 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
13527 else
13528 {
13529 /* Avoid the extra copy if possible. */
13530 *expr_p = create_tmp_reg (TREE_TYPE (name));
13531 if (!gimple_nop_p (SSA_NAME_DEF_STMT (name)))
13532 gimple_set_lhs (SSA_NAME_DEF_STMT (name), *expr_p);
13533 release_ssa_name (name);
13534 }
13535 }
13536 return ret;
13537 }
13538
13539 /* Look through TYPE for variable-sized objects and gimplify each such
13540 size that we find. Add to LIST_P any statements generated. */
13541
13542 void
13543 gimplify_type_sizes (tree type, gimple_seq *list_p)
13544 {
13545 tree field, t;
13546
13547 if (type == NULL || type == error_mark_node)
13548 return;
13549
13550 /* We first do the main variant, then copy into any other variants. */
13551 type = TYPE_MAIN_VARIANT (type);
13552
13553 /* Avoid infinite recursion. */
13554 if (TYPE_SIZES_GIMPLIFIED (type))
13555 return;
13556
13557 TYPE_SIZES_GIMPLIFIED (type) = 1;
13558
13559 switch (TREE_CODE (type))
13560 {
13561 case INTEGER_TYPE:
13562 case ENUMERAL_TYPE:
13563 case BOOLEAN_TYPE:
13564 case REAL_TYPE:
13565 case FIXED_POINT_TYPE:
13566 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
13567 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
13568
13569 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
13570 {
13571 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
13572 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
13573 }
13574 break;
13575
13576 case ARRAY_TYPE:
13577 /* These types may not have declarations, so handle them here.
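For example (an illustrative sketch), given
  void f (int n) { int a[n]; }
the anonymous array and domain types of A carry variable bounds and
sizes that must be gimplified into explicit statements here.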
*/ 13578 gimplify_type_sizes (TREE_TYPE (type), list_p); 13579 gimplify_type_sizes (TYPE_DOMAIN (type), list_p); 13580 /* Ensure VLA bounds aren't removed, for -O0 they should be variables 13581 with assigned stack slots, for -O1+ -g they should be tracked 13582 by VTA. */ 13583 if (!(TYPE_NAME (type) 13584 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL 13585 && DECL_IGNORED_P (TYPE_NAME (type))) 13586 && TYPE_DOMAIN (type) 13587 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type))) 13588 { 13589 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type)); 13590 if (t && VAR_P (t) && DECL_ARTIFICIAL (t)) 13591 DECL_IGNORED_P (t) = 0; 13592 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type)); 13593 if (t && VAR_P (t) && DECL_ARTIFICIAL (t)) 13594 DECL_IGNORED_P (t) = 0; 13595 } 13596 break; 13597 13598 case RECORD_TYPE: 13599 case UNION_TYPE: 13600 case QUAL_UNION_TYPE: 13601 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field)) 13602 if (TREE_CODE (field) == FIELD_DECL) 13603 { 13604 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p); 13605 gimplify_one_sizepos (&DECL_SIZE (field), list_p); 13606 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p); 13607 gimplify_type_sizes (TREE_TYPE (field), list_p); 13608 } 13609 break; 13610 13611 case POINTER_TYPE: 13612 case REFERENCE_TYPE: 13613 /* We used to recurse on the pointed-to type here, which turned out to 13614 be incorrect because its definition might refer to variables not 13615 yet initialized at this point if a forward declaration is involved. 13616 13617 It was actually useful for anonymous pointed-to types to ensure 13618 that the sizes evaluation dominates every possible later use of the 13619 values. Restricting to such types here would be safe since there 13620 is no possible forward declaration around, but would introduce an 13621 undesirable middle-end semantic to anonymity. We then defer to 13622 front-ends the responsibility of ensuring that the sizes are 13623 evaluated both early and late enough, e.g. by attaching artificial 13624 type declarations to the tree. */ 13625 break; 13626 13627 default: 13628 break; 13629 } 13630 13631 gimplify_one_sizepos (&TYPE_SIZE (type), list_p); 13632 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p); 13633 13634 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t)) 13635 { 13636 TYPE_SIZE (t) = TYPE_SIZE (type); 13637 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type); 13638 TYPE_SIZES_GIMPLIFIED (t) = 1; 13639 } 13640 } 13641 13642 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P, 13643 a size or position, has had all of its SAVE_EXPRs evaluated. 13644 We add any required statements to *STMT_P. */ 13645 13646 void 13647 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p) 13648 { 13649 tree expr = *expr_p; 13650 13651 /* We don't do anything if the value isn't there, is constant, or contains 13652 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already 13653 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier 13654 will want to replace it with a new variable, but that will cause problems 13655 if this type is from outside the function. It's OK to have that here. */ 13656 if (expr == NULL_TREE 13657 || is_gimple_constant (expr) 13658 || TREE_CODE (expr) == VAR_DECL 13659 || CONTAINS_PLACEHOLDER_P (expr)) 13660 return; 13661 13662 *expr_p = unshare_expr (expr); 13663 13664 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed 13665 if the def vanishes. 
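Hence the call below passes false for its ALLOW_SSA parameter, so the
gimplified size or position ends up as a decl or constant instead.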
*/ 13666 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false); 13667 13668 /* If expr wasn't already is_gimple_sizepos or is_gimple_constant from the 13669 FE, ensure that it is a VAR_DECL, otherwise we might handle some decls 13670 as gimplify_vla_decl even when they would have all sizes INTEGER_CSTs. */ 13671 if (is_gimple_constant (*expr_p)) 13672 *expr_p = get_initialized_tmp_var (*expr_p, stmt_p, NULL, false); 13673 } 13674 13675 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node 13676 containing the sequence of corresponding GIMPLE statements. If DO_PARMS 13677 is true, also gimplify the parameters. */ 13678 13679 gbind * 13680 gimplify_body (tree fndecl, bool do_parms) 13681 { 13682 location_t saved_location = input_location; 13683 gimple_seq parm_stmts, parm_cleanup = NULL, seq; 13684 gimple *outer_stmt; 13685 gbind *outer_bind; 13686 13687 timevar_push (TV_TREE_GIMPLIFY); 13688 13689 init_tree_ssa (cfun); 13690 13691 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during 13692 gimplification. */ 13693 default_rtl_profile (); 13694 13695 gcc_assert (gimplify_ctxp == NULL); 13696 push_gimplify_context (true); 13697 13698 if (flag_openacc || flag_openmp) 13699 { 13700 gcc_assert (gimplify_omp_ctxp == NULL); 13701 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl))) 13702 gimplify_omp_ctxp = new_omp_context (ORT_TARGET); 13703 } 13704 13705 /* Unshare most shared trees in the body and in that of any nested functions. 13706 It would seem we don't have to do this for nested functions because 13707 they are supposed to be output and then the outer function gimplified 13708 first, but the g++ front end doesn't always do it that way. */ 13709 unshare_body (fndecl); 13710 unvisit_body (fndecl); 13711 13712 /* Make sure input_location isn't set to something weird. */ 13713 input_location = DECL_SOURCE_LOCATION (fndecl); 13714 13715 /* Resolve callee-copies. This has to be done before processing 13716 the body so that DECL_VALUE_EXPR gets processed correctly. */ 13717 parm_stmts = do_parms ? gimplify_parameters (&parm_cleanup) : NULL; 13718 13719 /* Gimplify the function's body. */ 13720 seq = NULL; 13721 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq); 13722 outer_stmt = gimple_seq_first_stmt (seq); 13723 if (!outer_stmt) 13724 { 13725 outer_stmt = gimple_build_nop (); 13726 gimplify_seq_add_stmt (&seq, outer_stmt); 13727 } 13728 13729 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is 13730 not the case, wrap everything in a GIMPLE_BIND to make it so. */ 13731 if (gimple_code (outer_stmt) == GIMPLE_BIND 13732 && gimple_seq_first (seq) == gimple_seq_last (seq)) 13733 outer_bind = as_a <gbind *> (outer_stmt); 13734 else 13735 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL); 13736 13737 DECL_SAVED_TREE (fndecl) = NULL_TREE; 13738 13739 /* If we had callee-copies statements, insert them at the beginning 13740 of the function and clear DECL_VALUE_EXPR_P on the parameters. 
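(Roughly: gimplify_parameters gave each callee-copied parameter a local
replacement and a DECL_VALUE_EXPR pointing at it; the body's uses were
already rewritten during gimplification, so the value-exprs are no
longer needed afterwards.)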
*/ 13741 if (!gimple_seq_empty_p (parm_stmts)) 13742 { 13743 tree parm; 13744 13745 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind)); 13746 if (parm_cleanup) 13747 { 13748 gtry *g = gimple_build_try (parm_stmts, parm_cleanup, 13749 GIMPLE_TRY_FINALLY); 13750 parm_stmts = NULL; 13751 gimple_seq_add_stmt (&parm_stmts, g); 13752 } 13753 gimple_bind_set_body (outer_bind, parm_stmts); 13754 13755 for (parm = DECL_ARGUMENTS (current_function_decl); 13756 parm; parm = DECL_CHAIN (parm)) 13757 if (DECL_HAS_VALUE_EXPR_P (parm)) 13758 { 13759 DECL_HAS_VALUE_EXPR_P (parm) = 0; 13760 DECL_IGNORED_P (parm) = 0; 13761 } 13762 } 13763 13764 if ((flag_openacc || flag_openmp || flag_openmp_simd) 13765 && gimplify_omp_ctxp) 13766 { 13767 delete_omp_context (gimplify_omp_ctxp); 13768 gimplify_omp_ctxp = NULL; 13769 } 13770 13771 pop_gimplify_context (outer_bind); 13772 gcc_assert (gimplify_ctxp == NULL); 13773 13774 if (flag_checking && !seen_error ()) 13775 verify_gimple_in_seq (gimple_bind_body (outer_bind)); 13776 13777 timevar_pop (TV_TREE_GIMPLIFY); 13778 input_location = saved_location; 13779 13780 return outer_bind; 13781 } 13782 13783 typedef char *char_p; /* For DEF_VEC_P. */ 13784 13785 /* Return whether we should exclude FNDECL from instrumentation. */ 13786 13787 static bool 13788 flag_instrument_functions_exclude_p (tree fndecl) 13789 { 13790 vec<char_p> *v; 13791 13792 v = (vec<char_p> *) flag_instrument_functions_exclude_functions; 13793 if (v && v->length () > 0) 13794 { 13795 const char *name; 13796 int i; 13797 char *s; 13798 13799 name = lang_hooks.decl_printable_name (fndecl, 0); 13800 FOR_EACH_VEC_ELT (*v, i, s) 13801 if (strstr (name, s) != NULL) 13802 return true; 13803 } 13804 13805 v = (vec<char_p> *) flag_instrument_functions_exclude_files; 13806 if (v && v->length () > 0) 13807 { 13808 const char *name; 13809 int i; 13810 char *s; 13811 13812 name = DECL_SOURCE_FILE (fndecl); 13813 FOR_EACH_VEC_ELT (*v, i, s) 13814 if (strstr (name, s) != NULL) 13815 return true; 13816 } 13817 13818 return false; 13819 } 13820 13821 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL 13822 node for the function we want to gimplify. 13823 13824 Return the sequence of GIMPLE statements corresponding to the body 13825 of FNDECL. */ 13826 13827 void 13828 gimplify_function_tree (tree fndecl) 13829 { 13830 tree parm, ret; 13831 gimple_seq seq; 13832 gbind *bind; 13833 13834 gcc_assert (!gimple_body (fndecl)); 13835 13836 if (DECL_STRUCT_FUNCTION (fndecl)) 13837 push_cfun (DECL_STRUCT_FUNCTION (fndecl)); 13838 else 13839 push_struct_function (fndecl); 13840 13841 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr 13842 if necessary. */ 13843 cfun->curr_properties |= PROP_gimple_lva; 13844 13845 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm)) 13846 { 13847 /* Preliminarily mark non-addressed complex variables as eligible 13848 for promotion to gimple registers. We'll transform their uses 13849 as we find them. 
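For instance (illustrative), a "_Complex double" parameter whose
address is never taken can later be split into separate real and
imaginary SSA registers by the complex-lowering machinery.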
*/ 13850 if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE 13851 || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE) 13852 && !TREE_THIS_VOLATILE (parm) 13853 && !needs_to_live_in_memory (parm)) 13854 DECL_GIMPLE_REG_P (parm) = 1; 13855 } 13856 13857 ret = DECL_RESULT (fndecl); 13858 if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE 13859 || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE) 13860 && !needs_to_live_in_memory (ret)) 13861 DECL_GIMPLE_REG_P (ret) = 1; 13862 13863 if (asan_sanitize_use_after_scope () && sanitize_flags_p (SANITIZE_ADDRESS)) 13864 asan_poisoned_variables = new hash_set<tree> (); 13865 bind = gimplify_body (fndecl, true); 13866 if (asan_poisoned_variables) 13867 { 13868 delete asan_poisoned_variables; 13869 asan_poisoned_variables = NULL; 13870 } 13871 13872 /* The tree body of the function is no longer needed, replace it 13873 with the new GIMPLE body. */ 13874 seq = NULL; 13875 gimple_seq_add_stmt (&seq, bind); 13876 gimple_set_body (fndecl, seq); 13877 13878 /* If we're instrumenting function entry/exit, then prepend the call to 13879 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to 13880 catch the exit hook. */ 13881 /* ??? Add some way to ignore exceptions for this TFE. */ 13882 if (flag_instrument_function_entry_exit 13883 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl) 13884 /* Do not instrument extern inline functions. */ 13885 && !(DECL_DECLARED_INLINE_P (fndecl) 13886 && DECL_EXTERNAL (fndecl) 13887 && DECL_DISREGARD_INLINE_LIMITS (fndecl)) 13888 && !flag_instrument_functions_exclude_p (fndecl)) 13889 { 13890 tree x; 13891 gbind *new_bind; 13892 gimple *tf; 13893 gimple_seq cleanup = NULL, body = NULL; 13894 tree tmp_var, this_fn_addr; 13895 gcall *call; 13896 13897 /* The instrumentation hooks aren't going to call the instrumented 13898 function and the address they receive is expected to be matchable 13899 against symbol addresses. Make sure we don't create a trampoline, 13900 in case the current function is nested. */ 13901 this_fn_addr = build_fold_addr_expr (current_function_decl); 13902 TREE_NO_TRAMPOLINE (this_fn_addr) = 1; 13903 13904 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS); 13905 call = gimple_build_call (x, 1, integer_zero_node); 13906 tmp_var = create_tmp_var (ptr_type_node, "return_addr"); 13907 gimple_call_set_lhs (call, tmp_var); 13908 gimplify_seq_add_stmt (&cleanup, call); 13909 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT); 13910 call = gimple_build_call (x, 2, this_fn_addr, tmp_var); 13911 gimplify_seq_add_stmt (&cleanup, call); 13912 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY); 13913 13914 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS); 13915 call = gimple_build_call (x, 1, integer_zero_node); 13916 tmp_var = create_tmp_var (ptr_type_node, "return_addr"); 13917 gimple_call_set_lhs (call, tmp_var); 13918 gimplify_seq_add_stmt (&body, call); 13919 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER); 13920 call = gimple_build_call (x, 2, this_fn_addr, tmp_var); 13921 gimplify_seq_add_stmt (&body, call); 13922 gimplify_seq_add_stmt (&body, tf); 13923 new_bind = gimple_build_bind (NULL, body, NULL); 13924 13925 /* Replace the current function body with the body 13926 wrapped in the try/finally TF. 
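The resulting body has roughly this shape (a sketch; with
-finstrument-functions the two hooks are __cyg_profile_func_enter and
__cyg_profile_func_exit):
  t = __builtin_return_address (0);
  __cyg_profile_func_enter (this_fn_addr, t);
  try
    {
      ... original body ...
    }
  finally
    {
      t = __builtin_return_address (0);
      __cyg_profile_func_exit (this_fn_addr, t);
    }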
*/ 13927 seq = NULL; 13928 gimple_seq_add_stmt (&seq, new_bind); 13929 gimple_set_body (fndecl, seq); 13930 bind = new_bind; 13931 } 13932 13933 if (sanitize_flags_p (SANITIZE_THREAD)) 13934 { 13935 gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0); 13936 gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY); 13937 gbind *new_bind = gimple_build_bind (NULL, tf, NULL); 13938 /* Replace the current function body with the body 13939 wrapped in the try/finally TF. */ 13940 seq = NULL; 13941 gimple_seq_add_stmt (&seq, new_bind); 13942 gimple_set_body (fndecl, seq); 13943 } 13944 13945 DECL_SAVED_TREE (fndecl) = NULL_TREE; 13946 cfun->curr_properties |= PROP_gimple_any; 13947 13948 pop_cfun (); 13949 13950 dump_function (TDI_gimple, fndecl); 13951 } 13952 13953 /* Return a dummy expression of type TYPE in order to keep going after an 13954 error. */ 13955 13956 static tree 13957 dummy_object (tree type) 13958 { 13959 tree t = build_int_cst (build_pointer_type (type), 0); 13960 return build2 (MEM_REF, type, t, t); 13961 } 13962 13963 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a 13964 builtin function, but a very special sort of operator. */ 13965 13966 enum gimplify_status 13967 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, 13968 gimple_seq *post_p ATTRIBUTE_UNUSED) 13969 { 13970 tree promoted_type, have_va_type; 13971 tree valist = TREE_OPERAND (*expr_p, 0); 13972 tree type = TREE_TYPE (*expr_p); 13973 tree t, tag, aptag; 13974 location_t loc = EXPR_LOCATION (*expr_p); 13975 13976 /* Verify that valist is of the proper type. */ 13977 have_va_type = TREE_TYPE (valist); 13978 if (have_va_type == error_mark_node) 13979 return GS_ERROR; 13980 have_va_type = targetm.canonical_va_list_type (have_va_type); 13981 if (have_va_type == NULL_TREE 13982 && POINTER_TYPE_P (TREE_TYPE (valist))) 13983 /* Handle 'Case 1: Not an array type' from c-common.c/build_va_arg. */ 13984 have_va_type 13985 = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist))); 13986 gcc_assert (have_va_type != NULL_TREE); 13987 13988 /* Generate a diagnostic for requesting data of a type that cannot 13989 be passed through `...' due to type promotion at the call site. */ 13990 if ((promoted_type = lang_hooks.types.type_promotes_to (type)) 13991 != type) 13992 { 13993 static bool gave_help; 13994 bool warned; 13995 /* Use the expansion point to handle cases such as passing bool (defined 13996 in a system header) through `...'. */ 13997 location_t xloc 13998 = expansion_point_location_if_in_system_header (loc); 13999 14000 /* Unfortunately, this is merely undefined, rather than a constraint 14001 violation, so we cannot make this an error. If this call is never 14002 executed, the program is still strictly conforming. */ 14003 auto_diagnostic_group d; 14004 warned = warning_at (xloc, 0, 14005 "%qT is promoted to %qT when passed through %<...%>", 14006 type, promoted_type); 14007 if (!gave_help && warned) 14008 { 14009 gave_help = true; 14010 inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)", 14011 promoted_type, type); 14012 } 14013 14014 /* We can, however, treat "undefined" any way we please. 14015 Call abort to encourage the user to fix the program. */ 14016 if (warned) 14017 inform (xloc, "if this code is reached, the program will abort"); 14018 /* Before the abort, allow the evaluation of the va_list 14019 expression to exit or longjmp. 
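For example (illustrative), in "va_arg (*get_ap (), char)" the call to
get_ap () may itself exit or longjmp, so it must be evaluated before
the trap call is emitted.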
*/
14020 gimplify_and_add (valist, pre_p);
14021 t = build_call_expr_loc (loc,
14022 builtin_decl_implicit (BUILT_IN_TRAP), 0);
14023 gimplify_and_add (t, pre_p);
14024
14025 /* This is dead code, but go ahead and finish so that the
14026 mode of the result comes out right. */
14027 *expr_p = dummy_object (type);
14028 return GS_ALL_DONE;
14029 }
14030
14031 tag = build_int_cst (build_pointer_type (type), 0);
14032 aptag = build_int_cst (TREE_TYPE (valist), 0);
14033
14034 *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
14035 valist, tag, aptag);
14036
14037 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
14038 needs to be expanded. */
14039 cfun->curr_properties &= ~PROP_gimple_lva;
14040
14041 return GS_OK;
14042 }
14043
14044 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
14045
14046 DST/SRC are the destination and source respectively. You can pass
14047 ungimplified trees in DST or SRC, in which case they will be
14048 converted to a gimple operand if necessary.
14049
14050 This function returns the newly created GIMPLE_ASSIGN tuple. */
14051
14052 gimple *
14053 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
14054 {
14055 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
14056 gimplify_and_add (t, seq_p);
14057 ggc_free (t);
14058 return gimple_seq_last_stmt (*seq_p);
14059 }
14060
14061 inline hashval_t
14062 gimplify_hasher::hash (const elt_t *p)
14063 {
14064 tree t = p->val;
14065 return iterative_hash_expr (t, 0);
14066 }
14067
14068 inline bool
14069 gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
14070 {
14071 tree t1 = p1->val;
14072 tree t2 = p2->val;
14073 enum tree_code code = TREE_CODE (t1);
14074
14075 if (TREE_CODE (t2) != code
14076 || TREE_TYPE (t1) != TREE_TYPE (t2))
14077 return false;
14078
14079 if (!operand_equal_p (t1, t2, 0))
14080 return false;
14081
14082 /* Only allow them to compare equal if they also hash equal; otherwise
14083 results are nondeterministic, and we fail bootstrap comparison. */
14084 gcc_checking_assert (hash (p1) == hash (p2));
14085
14086 return true;
14087 }
14088
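/* Example use of gimplify_assign (an illustrative sketch, not part of
   this file's interface):

     gimple_seq seq = NULL;
     tree tmp = create_tmp_var (integer_type_node, "t");
     gimple *g = gimplify_assign (tmp,
                                  build_int_cst (integer_type_node, 42),
                                  &seq);

   appends the assignment "t = 42" to SEQ and returns the new
   GIMPLE_ASSIGN statement G.  */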