/* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
   tree representation into the GIMPLE form.
   Copyright (C) 2002-2015 Free Software Foundation, Inc.
   Major work done by Sebastian Pop <s.pop@laposte.net>,
   Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "options.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "hashtab.h"
#include "tm.h"
#include "hard-reg-set.h"
#include "function.h"
#include "rtl.h"
#include "flags.h"
#include "statistics.h"
#include "real.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "predict.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "tree-pretty-print.h"
#include "langhooks.h"
#include "bitmap.h"
#include "gimple-ssa.h"
#include "hash-map.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "tree-cfg.h"
#include "tree-ssanames.h"
#include "tree-ssa.h"
#include "diagnostic-core.h"
#include "target.h"
#include "splay-tree.h"
#include "omp-low.h"
#include "gimple-low.h"
#include "cilk.h"
#include "gomp-constants.h"

#include "langhooks-def.h"	/* FIXME: for lhd_set_decl_assembler_name */
#include "tree-pass.h"		/* FIXME: only for PROP_gimple_any */
#include "builtins.h"

enum gimplify_omp_var_data
{
  GOVD_SEEN = 1,
  GOVD_EXPLICIT = 2,
  GOVD_SHARED = 4,
  GOVD_PRIVATE = 8,
  GOVD_FIRSTPRIVATE = 16,
  GOVD_LASTPRIVATE = 32,
  GOVD_REDUCTION = 64,
  GOVD_LOCAL = 128,
  GOVD_MAP = 256,
  GOVD_DEBUG_PRIVATE = 512,
  GOVD_PRIVATE_OUTER_REF = 1024,
  GOVD_LINEAR = 2048,
  GOVD_ALIGNED = 4096,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 8192,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 16384,

  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};
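/* These values are bit flags, so a variable's entry in the splay tree
   is a combination: e.g. a variable named in an explicit `private'
   clause might be recorded as GOVD_PRIVATE | GOVD_EXPLICIT, with
   GOVD_SEEN OR'ed in by omp_notice_variable once a real reference to
   it is encountered (an illustrative combination, not an exhaustive
   rule).  */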
enum omp_region_type
{
  ORT_WORKSHARE = 0,
  ORT_SIMD = 1,
  ORT_PARALLEL = 2,
  ORT_COMBINED_PARALLEL = 3,
  ORT_TASK = 4,
  ORT_UNTIED_TASK = 5,
  ORT_TEAMS = 8,
  ORT_COMBINED_TEAMS = 9,
  /* Data region.  */
  ORT_TARGET_DATA = 16,
  /* Data region with offloading.  */
  ORT_TARGET = 32
};

/* Gimplify hashtable helper.  */

struct gimplify_hasher : typed_free_remove <elt_t>
{
  typedef elt_t value_type;
  typedef elt_t compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

struct gimplify_ctx
{
  struct gimplify_ctx *prev_context;

  vec<gbind *> bind_expr_stack;
  tree temps;
  gimple_seq conditional_cleanups;
  tree exit_label;
  tree return_temp;

  vec<tree> case_labels;
  /* The formal temporary table.  Should this be persistent?  */
  hash_table<gimplify_hasher> *temp_htab;

  int conditions;
  bool save_stack;
  bool into_ssa;
  bool allow_rhs_cond_expr;
  bool in_cleanup_point_expr;
};

struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;
  splay_tree variables;
  hash_set<tree> *privatized_types;
  location_t location;
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
  bool combined_loop;
  bool distribute;
};

static struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;

/* Forward declaration.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);

/* Shorter alias name for the above function for use in gimplify.c
   only.  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}

/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  This function is
   similar to gimple_seq_add_seq, but does not scan the operands.
   During gimplification, we need to manipulate statement sequences
   before the def/use vectors have been constructed.  */

static void
gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  si = gsi_last (*dst_p);
  gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
}
/* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
   and popping gimplify contexts.  */

static struct gimplify_ctx *ctx_pool = NULL;

/* Return a gimplify context struct from the pool.  */

static inline struct gimplify_ctx *
ctx_alloc (void)
{
  struct gimplify_ctx * c = ctx_pool;

  if (c)
    ctx_pool = c->prev_context;
  else
    c = XNEW (struct gimplify_ctx);

  memset (c, '\0', sizeof (*c));
  return c;
}

/* Put gimplify context C back into the pool.  */

static inline void
ctx_free (struct gimplify_ctx *c)
{
  c->prev_context = ctx_pool;
  ctx_pool = c;
}

/* Free allocated ctx stack memory.  */

void
free_gimplify_stack (void)
{
  struct gimplify_ctx *c;

  while ((c = ctx_pool))
    {
      ctx_pool = c->prev_context;
      free (c);
    }
}


/* Set up a context for the gimplifier.  */

void
push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
{
  struct gimplify_ctx *c = ctx_alloc ();

  c->prev_context = gimplify_ctxp;
  gimplify_ctxp = c;
  gimplify_ctxp->into_ssa = in_ssa;
  gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
}

/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  gcc_assert (c
	      && (!c->bind_expr_stack.exists ()
		  || c->bind_expr_stack.is_empty ()));
  c->bind_expr_stack.release ();
  gimplify_ctxp = c->prev_context;

  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  delete c->temp_htab;
  c->temp_htab = NULL;
  ctx_free (c);
}
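/* A typical pairing, sketched after the way gimplify_body uses it:

     push_gimplify_context ();
     gimplify_stmt (&body, &seq);
     pop_gimplify_context (outer_bind);

   so that the temporaries created while gimplifying BODY end up
   declared in OUTER_BIND.  */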
/* Push a GIMPLE_BIND tuple onto the stack of bindings.  */

static void
gimple_push_bind_expr (gbind *bind_stmt)
{
  gimplify_ctxp->bind_expr_stack.reserve (8);
  gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
}

/* Pop the first element off the stack of bindings.  */

static void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}

/* Return the first element of the stack of bindings.  */

gbind *
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}

/* Return the stack of bindings created during gimplification.  */

vec<gbind *>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}

/* Return true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}

/* Note that we've entered a COND_EXPR.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}

/* Note that we've left a COND_EXPR.  If we're back at unconditional scope
   now, add any conditional cleanups we've seen to the prequeue.  */

static void
gimple_pop_condition (gimple_seq *pre_p)
{
  int conds = --(gimplify_ctxp->conditions);

  gcc_assert (conds >= 0);
  if (conds == 0)
    {
      gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
      gimplify_ctxp->conditional_cleanups = NULL;
    }
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* Create a new omp construct that deals with variable remapping.  */

static struct gimplify_omp_ctx *
new_omp_context (enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *c;

  c = XCNEW (struct gimplify_omp_ctx);
  c->outer_context = gimplify_omp_ctxp;
  c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
  c->privatized_types = new hash_set<tree>;
  c->location = input_location;
  c->region_type = region_type;
  if ((region_type & ORT_TASK) == 0)
    c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  else
    c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;

  return c;
}

/* Destroy an omp construct that deals with variable remapping.  */

static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  delete c->privatized_types;
  XDELETE (c);
}

static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);

/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}

/* Gimplify statement T into sequence *SEQ_P, and return the first
   tuple in the sequence of generated tuples for this statement.
   Return NULL if gimplifying T produced no tuples.  */

static gimple
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  gimple_stmt_iterator last = gsi_last (*seq_p);

  gimplify_and_add (t, seq_p);

  if (!gsi_end_p (last))
    {
      gsi_next (&last);
      return gsi_stmt (last);
    }
  else
    return gimple_seq_first_stmt (*seq_p);
}

/* Returns true iff T is a valid RHS for an assignment to an un-renamed
   LHS, or for a call argument.  */

static bool
is_gimple_mem_rhs (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return is_gimple_val (t) || is_gimple_lvalue (t);
}

/* Return true if T is a CALL_EXPR or an expression that can be
   assigned to a temporary.  Note that this predicate should only be
   used during gimplification.  See the rationale for this in
   gimplify_modify_expr.  */

static bool
is_gimple_reg_rhs_or_call (tree t)
{
  return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
	  || TREE_CODE (t) == CALL_EXPR);
}

/* Return true if T is a valid memory RHS or a CALL_EXPR.  Note that
   this predicate should only be used during gimplification.  See the
   rationale for this in gimplify_modify_expr.  */

static bool
is_gimple_mem_rhs_or_call (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return (is_gimple_val (t) || is_gimple_lvalue (t)
	    || TREE_CODE (t) == CALL_EXPR);
}

/* Create a temporary with a name derived from VAL.  Subroutine of
   lookup_tmp_var; nobody else should call this function.  */

static inline tree
create_tmp_from_val (tree val)
{
  /* Drop all qualifiers and address-space information from the value type.  */
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
  tree var = create_tmp_var (type, get_name (val));
  if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
      || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (var) = 1;
  return var;
}

/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary.  */

static tree
lookup_tmp_var (tree val, bool is_formal)
{
  tree ret;

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    ret = create_tmp_from_val (val);
  else
    {
      elt_t elt, *elt_p;
      elt_t **slot;

      elt.val = val;
      if (!gimplify_ctxp->temp_htab)
	gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
      slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
      if (*slot == NULL)
	{
	  elt_p = XNEW (elt_t);
	  elt_p->val = val;
	  elt_p->temp = ret = create_tmp_from_val (val);
	  *slot = elt_p;
	}
      else
	{
	  elt_p = *slot;
	  ret = elt_p->temp;
	}
    }

  return ret;
}
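/* For instance, with optimization enabled two structurally equal
   formal-temporary values such as `b + 1' hash to the same slot, so
   both occurrences are assigned to one temporary:

       D.1234 = b + 1;
       ...
       D.1234 = b + 1;

   leaving a redundancy that is trivial for later passes to remove.
   (D.1234 is a made-up name for illustration.)  */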
/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
		      bool is_formal)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  if (gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
  else
    t = lookup_tmp_var (val, is_formal);

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  ggc_free (mod);

  return t;
}

/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
      change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  return internal_get_tmp_var (val, pre_p, NULL, true);
}

/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
{
  return internal_get_tmp_var (val, pre_p, post_p, false);
}
/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  */

void
declare_vars (tree vars, gimple gs, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gbind *scope = as_a <gbind *> (gs);

      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
				    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}

/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
   for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
   no such upper bound can be obtained.  */

static void
force_constant_size (tree var)
{
  /* The only attempt we make is by querying the maximum size of objects
     of the variable's type.  */

  HOST_WIDE_INT max_size;

  gcc_assert (TREE_CODE (var) == VAR_DECL);

  max_size = max_int_size_in_bytes (TREE_TYPE (var));

  gcc_assert (max_size >= 0);

  DECL_SIZE_UNIT (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
  DECL_SIZE (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var_fn (struct function *fn, tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = fn->decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  record_vars_into (tmp, fn->decl);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_SIMD))
	    ctx = ctx->outer_context;
	  if (ctx)
	    omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}


/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
   nodes that are referenced more than once in GENERIC functions.  This is
   necessary because gimplification (translation into GIMPLE) is performed
   by modifying tree nodes in-place, so gimplification of a shared node in a
   first context could generate an invalid GIMPLE form in a second context.

   This is achieved with a simple mark/copy/unmark algorithm that walks the
   GENERIC representation top-down, marks nodes with TREE_VISITED the first
   time it encounters them, duplicates them if they already have TREE_VISITED
   set, and finally removes the TREE_VISITED marks it has set.

   The algorithm works only at the function level, i.e. it generates a GENERIC
   representation of a function with no nodes shared within the function when
   passed a GENERIC function (except for nodes that are allowed to be shared).

   At the global level, it is also necessary to unshare tree nodes that are
   referenced in more than one function, for the same aforementioned reason.
   This requires some cooperation from the front-end.  There are 2 strategies:

   1. Manual unsharing.  The front-end needs to call unshare_expr on every
      expression that might end up being shared across functions.

   2. Deep unsharing.  This is an extension of regular unsharing.  Instead
      of calling unshare_expr on expressions that might be shared across
      functions, the front-end pre-marks them with TREE_VISITED.  This will
      ensure that they are unshared on the first reference within functions
      when the regular unsharing algorithm runs.  The counterpart is that
      this algorithm must look deeper than for manual unsharing, which is
      specified by LANG_HOOKS_DEEP_UNSHARING.

   If there are only few specific cases of node sharing across functions, it is
   probably easier for a front-end to unshare the expressions manually.  On the
   contrary, if the expressions generated at the global level are as widespread
   as expressions generated within functions, deep unsharing is very likely the
   way to go.  */

/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      if (data && !((hash_set<tree> *)data)->add (t))
	;
      else
	*walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
	   || TREE_CODE_CLASS (code) == tcc_declaration
	   || TREE_CODE_CLASS (code) == tcc_constant
	   /* We can't do anything sensible with a BLOCK used as an
	      expression, but we also can't just die when we see it
	      because of non-expression uses.  So we avert our eyes
	      and cross our fingers.  Silly Java.  */
	   || code == BLOCK)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}
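/* To illustrate the mark/copy/unmark scheme implemented by
   copy_if_shared_r below: the first time the walk reaches a node it
   only sets TREE_VISITED; if the walk reaches the same node again,
   the mark is found and that second reference is rewritten to a
   fresh copy, so each referencing context gimplifies a distinct
   node.  unmark_visited later clears the marks again.  */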
/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}

/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
   copy_if_shared_r callback unmodified.  */

static inline void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}

/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
   any nested functions.  */

static void
unshare_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  /* If the language requires deep unsharing, we need a pointer set to make
     sure we don't repeatedly unshare subtrees of unshareable nodes.  */
  hash_set<tree> *visited
    = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;

  copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
  copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
  copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);

  delete visited;

  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unshare_body (cgn->decl);
}

/* Callback for walk_tree to unmark the visited trees rooted at *TP.
   Subtrees are walked until the first unvisited node is encountered.  */

static tree
unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  /* If this node has been visited, unmark it and keep looking.  */
  if (TREE_VISITED (t))
    TREE_VISITED (t) = 0;

  /* Otherwise, don't look any deeper.  */
  else
    *walk_subtrees = 0;

  return NULL_TREE;
}

/* Unmark the visited trees rooted at *TP.  */

static inline void
unmark_visited (tree *tp)
{
  walk_tree (tp, unmark_visited_r, NULL, NULL);
}
/* Likewise, but mark all trees as not visited.  */

static void
unvisit_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);

  unmark_visited (&DECL_SAVED_TREE (fndecl));
  unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
  unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));

  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unvisit_body (cgn->decl);
}

/* Unconditionally make an unshared copy of EXPR.  This is used when using
   stored expressions which span multiple functions, such as BINFO_VTABLE,
   as the normal unsharing process can't tell that they're shared.  */

tree
unshare_expr (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}

/* Worker for unshare_expr_without_location.  */

static tree
prune_expr_location (tree *tp, int *walk_subtrees, void *)
{
  if (EXPR_P (*tp))
    SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
  else
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Similar to unshare_expr but also prune all expression locations
   from EXPR.  */

tree
unshare_expr_without_location (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  if (EXPR_P (expr))
    walk_tree (&expr, prune_expr_location, NULL, NULL);
  return expr;
}

/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    case TRANSACTION_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TRANSACTION_EXPR_BODY (*p);
	      break;

	    default:
	      /* Assume that any tree upon which voidify_wrapper_expr is
		 directly called is a wrapper, and that its body is op0.  */
	      if (p == &wrapper)
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		  p = &TREE_OPERAND (*p, 0);
		  break;
		}
	      goto out;
	    }
	}

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}
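/* A sketch of the TEMP != NULL path: when an assignment has a
   statement expression on its RHS,

     x = ({ ...; y + 1; });

   the MODIFY_EXPR can be pushed down onto the last value-producing
   statement, yielding roughly

     ({ ...; x = y + 1; });

   while each enclosing wrapper is given void type.  When no
   assignment is supplied, a "retval" temporary is created instead.  */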
/* Prepare calls to builtins to SAVE and RESTORE the stack as well as
   a temporary through which they communicate.  */

static void
build_stack_save_restore (gcall **save, gcall **restore)
{
  tree tmp_var;

  *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
  tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
  gimple_call_set_lhs (*save, tmp_var);

  *restore
    = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
			 1, tmp_var);
}

/* Gimplify a BIND_EXPR.  Just voidify and recurse.  */

static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gbind *bind_stmt;
  gimple_seq body, cleanup;
  gcall *stack_save;
  location_t start_locus = 0, end_locus = 0;

  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (TREE_CODE (t) == VAR_DECL)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;

	  /* Mark variable as local.  */
	  if (ctx && !DECL_EXTERNAL (t)
	      && (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL))
	    {
	      if (ctx->region_type == ORT_SIMD
		  && TREE_ADDRESSABLE (t)
		  && !TREE_STATIC (t))
		omp_add_variable (ctx, t, GOVD_PRIVATE | GOVD_SEEN);
	      else
		omp_add_variable (ctx, t, GOVD_LOCAL | GOVD_SEEN);
	    }

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}

      /* Preliminarily mark non-addressed complex variables as eligible
	 for promotion to gimple registers.  We'll transform their uses
	 as we find them.  */
      if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	  && !TREE_THIS_VOLATILE (t)
	  && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
	  && !needs_to_live_in_memory (t))
	DECL_GIMPLE_REG_P (t) = 1;
    }

  bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
				 BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (bind_stmt);

  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (bind_stmt, body);

  /* Source location wise, the cleanup code (stack_restore and clobbers)
     belongs to the end of the block, so propagate what we have.  The
     stack_save operation belongs to the beginning of block, which we can
     infer from the bind_expr directly if the block has no explicit
     assignment.  */
  if (BIND_EXPR_BLOCK (bind_expr))
    {
      end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
      start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
    }
  if (start_locus == 0)
    start_locus = EXPR_LOCATION (bind_expr);

  cleanup = NULL;
  stack_save = NULL;
  if (gimplify_ctxp->save_stack)
    {
      gcall *stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimple_set_location (stack_save, start_locus);
      gimple_set_location (stack_restore, end_locus);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (TREE_CODE (t) == VAR_DECL
	  && !is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl
	  && !DECL_HARD_REGISTER (t)
	  && !TREE_THIS_VOLATILE (t)
	  && !DECL_HAS_VALUE_EXPR_P (t)
	  /* Only care for variables that have to be in memory.  Others
	     will be rewritten into SSA names, hence moved to the top-level.  */
	  && !is_gimple_reg (t)
	  && flag_stack_reuse != SR_NONE)
	{
	  tree clobber = build_constructor (TREE_TYPE (t), NULL);
	  gimple clobber_stmt;
	  TREE_THIS_VOLATILE (clobber) = 1;
	  clobber_stmt = gimple_build_assign (t, clobber);
	  gimple_set_location (clobber_stmt, end_locus);
	  gimplify_seq_add_stmt (&cleanup, clobber_stmt);
	}
    }

  if (cleanup)
    {
      gtry *gs;
      gimple_seq new_body;

      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
			     GIMPLE_TRY_FINALLY);

      if (stack_save)
	gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (bind_stmt, new_body);
    }

  gimplify_ctxp->save_stack = old_save_stack;
  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, bind_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
/* Gimplify a RETURN_EXPR.  If the expression to be returned is not a
   GIMPLE value, it is assigned to a new temporary and the statement is
   re-written to return the temporary.

   PRE_P points to the sequence where side effects that must happen before
   STMT should be stored.  */

static enum gimplify_status
gimplify_return_expr (tree stmt, gimple_seq *pre_p)
{
  greturn *ret;
  tree ret_expr = TREE_OPERAND (stmt, 0);
  tree result_decl, result;

  if (ret_expr == error_mark_node)
    return GS_ERROR;

  /* Implicit _Cilk_sync must be inserted right before any return statement
     if there is a _Cilk_spawn in the function.  If the user has provided a
     _Cilk_sync, the optimizer should remove this duplicate one.  */
  if (fn_contains_cilk_spawn_p (cfun))
    {
      tree impl_sync = build0 (CILK_SYNC_STMT, void_type_node);
      gimplify_and_add (impl_sync, pre_p);
    }

  if (!ret_expr
      || TREE_CODE (ret_expr) == RESULT_DECL
      || ret_expr == error_mark_node)
    {
      greturn *ret = gimple_build_return (ret_expr);
      gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
      gimplify_seq_add_stmt (pre_p, ret);
      return GS_ALL_DONE;
    }

  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
    result_decl = NULL_TREE;
  else
    {
      result_decl = TREE_OPERAND (ret_expr, 0);

      /* See through a return by reference.  */
      if (TREE_CODE (result_decl) == INDIRECT_REF)
	result_decl = TREE_OPERAND (result_decl, 0);

      gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
		   || TREE_CODE (ret_expr) == INIT_EXPR)
		  && TREE_CODE (result_decl) == RESULT_DECL);
    }

  /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
     Recall that aggregate_value_p is FALSE for any aggregate type that is
     returned in registers.  If we're returning values in registers, then
     we don't want to extend the lifetime of the RESULT_DECL, particularly
     across another call.  In addition, for those aggregates for which
     hard_function_value generates a PARALLEL, we'll die during normal
     expansion of structure assignments; there's special code in expand_return
     to handle this case that does not exist in expand_expr.  */
  if (!result_decl)
    result = NULL_TREE;
  else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
    {
      if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
	    gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
	  /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
	     should be effectively allocated by the caller, i.e. all calls to
	     this function must be subject to the Return Slot Optimization.  */
	  gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
	  gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
	}
      result = result_decl;
    }
  else if (gimplify_ctxp->return_temp)
    result = gimplify_ctxp->return_temp;
  else
    {
      result = create_tmp_reg (TREE_TYPE (result_decl));

      /* ??? With complex control flow (usually involving abnormal edges),
	 we can wind up warning about an uninitialized value for this.  Due
	 to how this variable is constructed and initialized, this is never
	 true.  Give up and never warn.  */
      TREE_NO_WARNING (result) = 1;

      gimplify_ctxp->return_temp = result;
    }

  /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
     Then gimplify the whole thing.  */
  if (result != result_decl)
    TREE_OPERAND (ret_expr, 0) = result;

  gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);

  ret = gimple_build_return (result);
  gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
  gimplify_seq_add_stmt (pre_p, ret);

  return GS_ALL_DONE;
}
/* Gimplify a variable-length array DECL.  */

static void
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
{
  /* This is a variable-sized decl.  Simplify its size and mark it
     for deferred expansion.  */
  tree t, addr, ptr_type;

  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);

  /* Don't mess with a DECL_VALUE_EXPR set by the front-end.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    return;

  /* All occurrences of this decl in final gimplified code will be
     replaced by indirection.  Setting DECL_VALUE_EXPR does two
     things: First, it lets the rest of the gimplifier know what
     replacement to use.  Second, it lets the debug info know
     where to find the value.  */
  ptr_type = build_pointer_type (TREE_TYPE (decl));
  addr = create_tmp_var (ptr_type, get_name (decl));
  DECL_IGNORED_P (addr) = 0;
  t = build_fold_indirect_ref (addr);
  TREE_THIS_NOTRAP (t) = 1;
  SET_DECL_VALUE_EXPR (decl, t);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;

  t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
  t = build_call_expr (t, 2, DECL_SIZE_UNIT (decl),
		       size_int (DECL_ALIGN (decl)));
  /* The call has been built for a variable-sized object.  */
  CALL_ALLOCA_FOR_VAR_P (t) = 1;
  t = fold_convert (ptr_type, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);

  gimplify_and_add (t, seq_p);

  /* Indicate that we need to restore the stack level when the
     enclosing BIND_EXPR is exited.  */
  gimplify_ctxp->save_stack = true;
}
/* A helper function to be called via walk_tree.  Mark all labels under *TP
   as being forced.  To be called for DECL_INITIAL of static variables.  */

static tree
force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  if (TYPE_P (*tp))
    *walk_subtrees = 0;
  if (TREE_CODE (*tp) == LABEL_DECL)
    FORCED_LABEL (*tp) = 1;

  return NULL_TREE;
}

/* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
   and initialization explicit.  */

static enum gimplify_status
gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
{
  tree stmt = *stmt_p;
  tree decl = DECL_EXPR_DECL (stmt);

  *stmt_p = NULL_TREE;

  if (TREE_TYPE (decl) == error_mark_node)
    return GS_ERROR;

  if ((TREE_CODE (decl) == TYPE_DECL
       || TREE_CODE (decl) == VAR_DECL)
      && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
    gimplify_type_sizes (TREE_TYPE (decl), seq_p);

  /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
     in case its size expressions contain problematic nodes like CALL_EXPR.  */
  if (TREE_CODE (decl) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (decl)
      && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
    gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);

  if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
    {
      tree init = DECL_INITIAL (decl);

      if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  || (!TREE_STATIC (decl)
	      && flag_stack_check == GENERIC_STACK_CHECK
	      && compare_tree_int (DECL_SIZE_UNIT (decl),
				   STACK_CHECK_MAX_VAR_SIZE) > 0))
	gimplify_vla_decl (decl, seq_p);

      /* Some front ends do not explicitly declare all anonymous
	 artificial variables.  We compensate here by declaring the
	 variables, though it would be better if the front ends would
	 explicitly declare them.  */
      if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
	  && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
	gimple_add_tmp_var (decl);

      if (init && init != error_mark_node)
	{
	  if (!TREE_STATIC (decl))
	    {
	      DECL_INITIAL (decl) = NULL_TREE;
	      init = build2 (INIT_EXPR, void_type_node, decl, init);
	      gimplify_and_add (init, seq_p);
	      ggc_free (init);
	    }
	  else
	    /* We must still examine initializers for static variables
	       as they may contain a label address.  */
	    walk_tree (&init, force_labels_r, NULL, NULL);
	}
    }

  return GS_ALL_DONE;
}
/* Gimplify a LOOP_EXPR.  Normally this just involves gimplifying the body
   and replacing the LOOP_EXPR with goto, but if the loop contains an
   EXIT_EXPR, we need to append a label for it to jump to.  */

static enum gimplify_status
gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree saved_label = gimplify_ctxp->exit_label;
  tree start_label = create_artificial_label (UNKNOWN_LOCATION);

  gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));

  gimplify_ctxp->exit_label = NULL_TREE;

  gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);

  gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));

  if (gimplify_ctxp->exit_label)
    gimplify_seq_add_stmt (pre_p,
			   gimple_build_label (gimplify_ctxp->exit_label));

  gimplify_ctxp->exit_label = saved_label;

  *expr_p = NULL;
  return GS_ALL_DONE;
}
/* Gimplify a statement list onto a sequence.  These may be created either
   by an enlightened front-end, or by shortcut_cond_expr.  */

static enum gimplify_status
gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
{
  tree temp = voidify_wrapper_expr (*expr_p, NULL);

  tree_stmt_iterator i = tsi_start (*expr_p);

  while (!tsi_end_p (i))
    {
      gimplify_stmt (tsi_stmt_ptr (i), pre_p);
      tsi_delink (&i);
    }

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  return GS_ALL_DONE;
}


/* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
   branch to.  */

static enum gimplify_status
gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree switch_expr = *expr_p;
  gimple_seq switch_body_seq = NULL;
  enum gimplify_status ret;
  tree index_type = TREE_TYPE (switch_expr);
  if (index_type == NULL_TREE)
    index_type = TREE_TYPE (SWITCH_COND (switch_expr));

  ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
		       fb_rvalue);
  if (ret == GS_ERROR || ret == GS_UNHANDLED)
    return ret;

  if (SWITCH_BODY (switch_expr))
    {
      vec<tree> labels;
      vec<tree> saved_labels;
      tree default_case = NULL_TREE;
      gswitch *switch_stmt;

      /* If someone can be bothered to fill in the labels, they can
	 be bothered to null out the body too.  */
      gcc_assert (!SWITCH_LABELS (switch_expr));

      /* Save old labels, get new ones from body, then restore the old
	 labels.  Save all the things from the switch body to append after.  */
      saved_labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels.create (8);

      gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
      labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels = saved_labels;

      preprocess_case_label_vec_for_gimple (labels, index_type,
					    &default_case);

      if (!default_case)
	{
	  glabel *new_default;

	  default_case
	    = build_case_label (NULL_TREE, NULL_TREE,
				create_artificial_label (UNKNOWN_LOCATION));
	  new_default = gimple_build_label (CASE_LABEL (default_case));
	  gimplify_seq_add_stmt (&switch_body_seq, new_default);
	}

      switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
					 default_case, labels);
      gimplify_seq_add_stmt (pre_p, switch_stmt);
      gimplify_seq_add_seq (pre_p, switch_body_seq);
      labels.release ();
    }
  else
    gcc_assert (SWITCH_LABELS (switch_expr));

  return GS_ALL_DONE;
}
/* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P.  */

static enum gimplify_status
gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
{
  struct gimplify_ctx *ctxp;
  glabel *label_stmt;

  /* Invalid programs can play Duff's Device type games with, for example,
     #pragma omp parallel.  At least in the C front end, we don't
     detect such invalid branches until after gimplification, in the
     diagnose_omp_blocks pass.  */
  for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
    if (ctxp->case_labels.exists ())
      break;

  label_stmt = gimple_build_label (CASE_LABEL (*expr_p));
  ctxp->case_labels.safe_push (*expr_p);
  gimplify_seq_add_stmt (pre_p, label_stmt);

  return GS_ALL_DONE;
}

/* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
   if necessary.  */

tree
build_and_jump (tree *label_p)
{
  if (label_p == NULL)
    /* If there's nowhere to jump, just fall through.  */
    return NULL_TREE;

  if (*label_p == NULL_TREE)
    {
      tree label = create_artificial_label (UNKNOWN_LOCATION);
      *label_p = label;
    }

  return build1 (GOTO_EXPR, void_type_node, *label_p);
}

/* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
   This also involves building a label to jump to and communicating it to
   gimplify_loop_expr through gimplify_ctxp->exit_label.  */

static enum gimplify_status
gimplify_exit_expr (tree *expr_p)
{
  tree cond = TREE_OPERAND (*expr_p, 0);
  tree expr;

  expr = build_and_jump (&gimplify_ctxp->exit_label);
  expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
  *expr_p = expr;

  return GS_OK;
}

/* *EXPR_P is a COMPONENT_REF being used as an rvalue.  If its type is
   different from its canonical type, wrap the whole thing inside a
   NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
   type.

   The canonical type of a COMPONENT_REF is the type of the field being
   referenced--unless the field is a bit-field which can be read directly
   in a smaller mode, in which case the canonical type is the
   sign-appropriate type corresponding to that mode.  */

static void
canonicalize_component_ref (tree *expr_p)
{
  tree expr = *expr_p;
  tree type;

  gcc_assert (TREE_CODE (expr) == COMPONENT_REF);

  if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
    type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
  else
    type = TREE_TYPE (TREE_OPERAND (expr, 1));

  /* One could argue that all the stuff below is not necessary for
     the non-bitfield case and declare it a FE error if type
     adjustment would be needed.  */
  if (TREE_TYPE (expr) != type)
    {
#ifdef ENABLE_TYPES_CHECKING
      tree old_type = TREE_TYPE (expr);
#endif
      int type_quals;

      /* We need to preserve qualifiers and propagate them from
	 operand 0.  */
      type_quals = TYPE_QUALS (type)
	| TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
      if (TYPE_QUALS (type) != type_quals)
	type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);

      /* Set the type of the COMPONENT_REF to the underlying type.  */
      TREE_TYPE (expr) = type;

#ifdef ENABLE_TYPES_CHECKING
      /* It is now a FE error, if the conversion from the canonical
	 type to the original expression type is not useless.  */
      gcc_assert (useless_type_conversion_p (old_type, type));
#endif
    }
}

/* If a NOP conversion is changing a pointer to array of foo to a pointer
   to foo, embed that change in the ADDR_EXPR by converting
      T array[U];
      (T *)&array
   ==>
      &array[L]
   where L is the lower bound.  For simplicity, only do this for constant
   lower bound.
   The constraint is that the type of &array[L] is trivially convertible
   to T *.  */

static void
canonicalize_addr_expr (tree *expr_p)
{
  tree expr = *expr_p;
  tree addr_expr = TREE_OPERAND (expr, 0);
  tree datype, ddatype, pddatype;

  /* We simplify only conversions from an ADDR_EXPR to a pointer type.  */
  if (!POINTER_TYPE_P (TREE_TYPE (expr))
      || TREE_CODE (addr_expr) != ADDR_EXPR)
    return;

  /* The addr_expr type should be a pointer to an array.  */
  datype = TREE_TYPE (TREE_TYPE (addr_expr));
  if (TREE_CODE (datype) != ARRAY_TYPE)
    return;

  /* The pointer to element type shall be trivially convertible to
     the expression pointer type.  */
  ddatype = TREE_TYPE (datype);
  pddatype = build_pointer_type (ddatype);
  if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
				  pddatype))
    return;

  /* The lower bound and element sizes must be constant.  */
  if (!TYPE_SIZE_UNIT (ddatype)
      || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
      || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
      || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
    return;

  /* All checks succeeded.  Build a new node to merge the cast.  */
  *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
		    TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
		    NULL_TREE, NULL_TREE);
  *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);

  /* We can have stripped a required restrict qualifier above.  */
  if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
    *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
}

/* *EXPR_P is a NOP_EXPR or CONVERT_EXPR.  Remove it and/or other conversions
   underneath as appropriate.  */
static enum gimplify_status
gimplify_conversion (tree *expr_p)
{
  location_t loc = EXPR_LOCATION (*expr_p);
  gcc_assert (CONVERT_EXPR_P (*expr_p));

  /* Then strip away all but the outermost conversion.  */
  STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));

  /* And remove the outermost conversion if it's useless.  */
  if (tree_ssa_useless_type_conversion (*expr_p))
    *expr_p = TREE_OPERAND (*expr_p, 0);

  /* If we still have a conversion at the toplevel,
     then canonicalize some constructs.  */
  if (CONVERT_EXPR_P (*expr_p))
    {
      tree sub = TREE_OPERAND (*expr_p, 0);

      /* If a NOP conversion is changing the type of a COMPONENT_REF
	 expression, then canonicalize its type now in order to expose more
	 redundant conversions.  */
      if (TREE_CODE (sub) == COMPONENT_REF)
	canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));

      /* If a NOP conversion is changing a pointer to array of foo
	 to a pointer to foo, embed that change in the ADDR_EXPR.  */
      else if (TREE_CODE (sub) == ADDR_EXPR)
	canonicalize_addr_expr (expr_p);
    }

  /* If we have a conversion to a non-register type force the
     use of a VIEW_CONVERT_EXPR instead.  */
  if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
    *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
			       TREE_OPERAND (*expr_p, 0));

  /* Canonicalize CONVERT_EXPR to NOP_EXPR.  */
  if (TREE_CODE (*expr_p) == CONVERT_EXPR)
    TREE_SET_CODE (*expr_p, NOP_EXPR);

  return GS_OK;
}

/* Nonlocal VLAs seen in the current function.  */
static hash_set<tree> *nonlocal_vlas;

/* The VAR_DECLs created for nonlocal VLAs for debug info purposes.  */
static tree nonlocal_vla_vars;

/* Gimplify a VAR_DECL or PARM_DECL.  Return GS_OK if we expanded a
   DECL_VALUE_EXPR, and it's worth re-examining things.  */

static enum gimplify_status
gimplify_var_or_parm_decl (tree *expr_p)
{
  tree decl = *expr_p;

  /* ??? If this is a local variable, and it has not been seen in any
     outer BIND_EXPR, then it's probably the result of a duplicate
     declaration, for which we've already issued an error.  It would
     be really nice if the front end wouldn't leak these at all.
     Currently the only known culprit is C++ destructors, as seen
     in g++.old-deja/g++.jason/binding.C.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !DECL_SEEN_IN_BIND_EXPR_P (decl)
      && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
      && decl_function_context (decl) == current_function_decl)
    {
      gcc_assert (seen_error ());
      return GS_ERROR;
    }

  /* When within an OMP context, notice uses of variables.  */
  if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
    return GS_ALL_DONE;

  /* If the decl is an alias for another expression, substitute it now.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree value_expr = DECL_VALUE_EXPR (decl);

      /* For referenced nonlocal VLAs add a decl for debugging purposes
	 to the current function.  */
      if (TREE_CODE (decl) == VAR_DECL
	  && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  && nonlocal_vlas != NULL
	  && TREE_CODE (value_expr) == INDIRECT_REF
	  && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
	  && decl_function_context (decl) != current_function_decl)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_SIMD))
	    ctx = ctx->outer_context;
	  if (!ctx && !nonlocal_vlas->add (decl))
	    {
	      tree copy = copy_node (decl);

	      lang_hooks.dup_lang_specific_decl (copy);
	      SET_DECL_RTL (copy, 0);
	      TREE_USED (copy) = 1;
	      DECL_CHAIN (copy) = nonlocal_vla_vars;
	      nonlocal_vla_vars = copy;
	      SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
	      DECL_HAS_VALUE_EXPR_P (copy) = 1;
	    }
	}

      *expr_p = unshare_expr (value_expr);
      return GS_OK;
    }

  return GS_ALL_DONE;
}
*/ 1816 if (TREE_CODE (decl) == VAR_DECL 1817 && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST 1818 && nonlocal_vlas != NULL 1819 && TREE_CODE (value_expr) == INDIRECT_REF 1820 && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL 1821 && decl_function_context (decl) != current_function_decl) 1822 { 1823 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; 1824 while (ctx 1825 && (ctx->region_type == ORT_WORKSHARE 1826 || ctx->region_type == ORT_SIMD)) 1827 ctx = ctx->outer_context; 1828 if (!ctx && !nonlocal_vlas->add (decl)) 1829 { 1830 tree copy = copy_node (decl); 1831 1832 lang_hooks.dup_lang_specific_decl (copy); 1833 SET_DECL_RTL (copy, 0); 1834 TREE_USED (copy) = 1; 1835 DECL_CHAIN (copy) = nonlocal_vla_vars; 1836 nonlocal_vla_vars = copy; 1837 SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr)); 1838 DECL_HAS_VALUE_EXPR_P (copy) = 1; 1839 } 1840 } 1841 1842 *expr_p = unshare_expr (value_expr); 1843 return GS_OK; 1844 } 1845 1846 return GS_ALL_DONE; 1847 } 1848 1849 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */ 1850 1851 static void 1852 recalculate_side_effects (tree t) 1853 { 1854 enum tree_code code = TREE_CODE (t); 1855 int len = TREE_OPERAND_LENGTH (t); 1856 int i; 1857 1858 switch (TREE_CODE_CLASS (code)) 1859 { 1860 case tcc_expression: 1861 switch (code) 1862 { 1863 case INIT_EXPR: 1864 case MODIFY_EXPR: 1865 case VA_ARG_EXPR: 1866 case PREDECREMENT_EXPR: 1867 case PREINCREMENT_EXPR: 1868 case POSTDECREMENT_EXPR: 1869 case POSTINCREMENT_EXPR: 1870 /* All of these have side-effects, no matter what their 1871 operands are. */ 1872 return; 1873 1874 default: 1875 break; 1876 } 1877 /* Fall through. */ 1878 1879 case tcc_comparison: /* a comparison expression */ 1880 case tcc_unary: /* a unary arithmetic expression */ 1881 case tcc_binary: /* a binary arithmetic expression */ 1882 case tcc_reference: /* a reference */ 1883 case tcc_vl_exp: /* a function call */ 1884 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t); 1885 for (i = 0; i < len; ++i) 1886 { 1887 tree op = TREE_OPERAND (t, i); 1888 if (op && TREE_SIDE_EFFECTS (op)) 1889 TREE_SIDE_EFFECTS (t) = 1; 1890 } 1891 break; 1892 1893 case tcc_constant: 1894 /* No side-effects. */ 1895 return; 1896 1897 default: 1898 gcc_unreachable (); 1899 } 1900 } 1901 1902 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR 1903 node *EXPR_P. 1904 1905 compound_lval 1906 : min_lval '[' val ']' 1907 | min_lval '.' ID 1908 | compound_lval '[' val ']' 1909 | compound_lval '.' ID 1910 1911 This is not part of the original SIMPLE definition, which separates 1912 array and member references, but it seems reasonable to handle them 1913 together. Also, this way we don't run into problems with union 1914 aliasing; gcc requires that for accesses through a union to alias, the 1915 union reference must be explicit, which was not always the case when we 1916 were splitting up array and member refs. 1917 1918 PRE_P points to the sequence where side effects that must happen before 1919 *EXPR_P should be stored. 1920 1921 POST_P points to the sequence where side effects that must happen after 1922 *EXPR_P should be stored. 
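
   For instance, for a hypothetical compound lvalue a.b[i].c we push
   the .c, [i] and .b references on a stack, gimplify the base object
   a, and then gimplify the index i, so that any side effects of i
   are emitted to PRE_P in source order.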
*/

static enum gimplify_status
gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
                        fallback_t fallback)
{
  tree *p;
  enum gimplify_status ret = GS_ALL_DONE, tret;
  int i;
  location_t loc = EXPR_LOCATION (*expr_p);
  tree expr = *expr_p;

  /* Create a stack of the subexpressions so later we can walk them in
     order from inner to outer.  */
  auto_vec<tree, 10> expr_stack;

  /* We can handle anything that get_inner_reference can deal with.  */
  for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
    {
    restart:
      /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs.  */
      if (TREE_CODE (*p) == INDIRECT_REF)
        *p = fold_indirect_ref_loc (loc, *p);

      if (handled_component_p (*p))
        ;
      /* Expand DECL_VALUE_EXPR now.  In some cases that may expose
         additional COMPONENT_REFs.  */
      else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
               && gimplify_var_or_parm_decl (p) == GS_OK)
        goto restart;
      else
        break;

      expr_stack.safe_push (*p);
    }

  gcc_assert (expr_stack.length ());

  /* Now EXPR_STACK is a stack of pointers to all the refs we've
     walked through and P points to the innermost expression.

     Java requires that we elaborate nodes in source order.  That
     means we must gimplify the inner expression followed by each of
     the indices, in order.  But we can't gimplify the inner
     expression until we deal with any variable bounds, sizes, or
     positions in order to deal with PLACEHOLDER_EXPRs.

     So we do this in three steps.  First we deal with the annotations
     for any variables in the components, then we gimplify the base,
     then we gimplify any indices, from left to right.  */
  for (i = expr_stack.length () - 1; i >= 0; i--)
    {
      tree t = expr_stack[i];

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
        {
          /* Gimplify the low bound and element type size and put them into
             the ARRAY_REF.  If these values are set, they have already been
             gimplified.  */
          if (TREE_OPERAND (t, 2) == NULL_TREE)
            {
              tree low = unshare_expr (array_ref_low_bound (t));
              if (!is_gimple_min_invariant (low))
                {
                  TREE_OPERAND (t, 2) = low;
                  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
                                        post_p, is_gimple_reg,
                                        fb_rvalue);
                  ret = MIN (ret, tret);
                }
            }
          else
            {
              tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
                                    is_gimple_reg, fb_rvalue);
              ret = MIN (ret, tret);
            }

          if (TREE_OPERAND (t, 3) == NULL_TREE)
            {
              tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
              tree elmt_size = unshare_expr (array_ref_element_size (t));
              tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));

              /* Divide the element size by the alignment of the element
                 type (above).  */
              elmt_size
                = size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);

              if (!is_gimple_min_invariant (elmt_size))
                {
                  TREE_OPERAND (t, 3) = elmt_size;
                  tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
                                        post_p, is_gimple_reg,
                                        fb_rvalue);
                  ret = MIN (ret, tret);
                }
            }
          else
            {
              tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
                                    is_gimple_reg, fb_rvalue);
              ret = MIN (ret, tret);
            }
        }
      else if (TREE_CODE (t) == COMPONENT_REF)
        {
          /* Set the field offset into T and gimplify it.
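             Operand 2 of the COMPONENT_REF caches the variable part of
             the field offset, divided below by its alignment factor; it
             is only needed when that offset is not compile-time
             constant, e.g. for a field following a variable-sized
             member.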
*/ 2031 if (TREE_OPERAND (t, 2) == NULL_TREE) 2032 { 2033 tree offset = unshare_expr (component_ref_field_offset (t)); 2034 tree field = TREE_OPERAND (t, 1); 2035 tree factor 2036 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT); 2037 2038 /* Divide the offset by its alignment. */ 2039 offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor); 2040 2041 if (!is_gimple_min_invariant (offset)) 2042 { 2043 TREE_OPERAND (t, 2) = offset; 2044 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, 2045 post_p, is_gimple_reg, 2046 fb_rvalue); 2047 ret = MIN (ret, tret); 2048 } 2049 } 2050 else 2051 { 2052 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p, 2053 is_gimple_reg, fb_rvalue); 2054 ret = MIN (ret, tret); 2055 } 2056 } 2057 } 2058 2059 /* Step 2 is to gimplify the base expression. Make sure lvalue is set 2060 so as to match the min_lval predicate. Failure to do so may result 2061 in the creation of large aggregate temporaries. */ 2062 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval, 2063 fallback | fb_lvalue); 2064 ret = MIN (ret, tret); 2065 2066 /* And finally, the indices and operands of ARRAY_REF. During this 2067 loop we also remove any useless conversions. */ 2068 for (; expr_stack.length () > 0; ) 2069 { 2070 tree t = expr_stack.pop (); 2071 2072 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF) 2073 { 2074 /* Gimplify the dimension. */ 2075 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1))) 2076 { 2077 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p, 2078 is_gimple_val, fb_rvalue); 2079 ret = MIN (ret, tret); 2080 } 2081 } 2082 2083 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0)); 2084 2085 /* The innermost expression P may have originally had 2086 TREE_SIDE_EFFECTS set which would have caused all the outer 2087 expressions in *EXPR_P leading to P to also have had 2088 TREE_SIDE_EFFECTS set. */ 2089 recalculate_side_effects (t); 2090 } 2091 2092 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */ 2093 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF) 2094 { 2095 canonicalize_component_ref (expr_p); 2096 } 2097 2098 expr_stack.release (); 2099 2100 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE); 2101 2102 return ret; 2103 } 2104 2105 /* Gimplify the self modifying expression pointed to by EXPR_P 2106 (++, --, +=, -=). 2107 2108 PRE_P points to the list where side effects that must happen before 2109 *EXPR_P should be stored. 2110 2111 POST_P points to the list where side effects that must happen after 2112 *EXPR_P should be stored. 2113 2114 WANT_VALUE is nonzero iff we want to use the value of this expression 2115 in another expression. 2116 2117 ARITH_TYPE is the type the computation should be performed in. */ 2118 2119 enum gimplify_status 2120 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p, 2121 bool want_value, tree arith_type) 2122 { 2123 enum tree_code code; 2124 tree lhs, lvalue, rhs, t1; 2125 gimple_seq post = NULL, *orig_post_p = post_p; 2126 bool postfix; 2127 enum tree_code arith_code; 2128 enum gimplify_status ret; 2129 location_t loc = EXPR_LOCATION (*expr_p); 2130 2131 code = TREE_CODE (*expr_p); 2132 2133 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR 2134 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR); 2135 2136 /* Prefix or postfix? */ 2137 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR) 2138 /* Faster to treat as prefix if result is not used. 
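       E.g. a statement like 'i++;' whose value is discarded is
       gimplified exactly as '++i;' would be, avoiding a temporary
       for the old value.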
*/
    postfix = want_value;
  else
    postfix = false;

  /* For postfix, make sure the inner expression's post side effects
     are executed after side effects from this expression.  */
  if (postfix)
    post_p = &post;

  /* Add or subtract?  */
  if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
    arith_code = PLUS_EXPR;
  else
    arith_code = MINUS_EXPR;

  /* Gimplify the LHS into a GIMPLE lvalue.  */
  lvalue = TREE_OPERAND (*expr_p, 0);
  ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Extract the operands to the arithmetic operation.  */
  lhs = lvalue;
  rhs = TREE_OPERAND (*expr_p, 1);

  /* For postfix operator, we evaluate the LHS to an rvalue and then use
     that as the result value and in the postqueue operation.  */
  if (postfix)
    {
      ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
      if (ret == GS_ERROR)
        return ret;

      lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
    }

  /* For pointer increment and decrement, use POINTER_PLUS_EXPR.  */
  if (POINTER_TYPE_P (TREE_TYPE (lhs)))
    {
      rhs = convert_to_ptrofftype_loc (loc, rhs);
      if (arith_code == MINUS_EXPR)
        rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
      t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
    }
  else
    t1 = fold_convert (TREE_TYPE (*expr_p),
                       fold_build2 (arith_code, arith_type,
                                    fold_convert (arith_type, lhs),
                                    fold_convert (arith_type, rhs)));

  if (postfix)
    {
      gimplify_assign (lvalue, t1, pre_p);
      gimplify_seq_add_seq (orig_post_p, post);
      *expr_p = lhs;
      return GS_ALL_DONE;
    }
  else
    {
      *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
      return GS_OK;
    }
}

/* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR.  */

static void
maybe_with_size_expr (tree *expr_p)
{
  tree expr = *expr_p;
  tree type = TREE_TYPE (expr);
  tree size;

  /* If we've already wrapped this or the type is error_mark_node, we can't do
     anything.  */
  if (TREE_CODE (expr) == WITH_SIZE_EXPR
      || type == error_mark_node)
    return;

  /* If the size isn't known or is a constant, we have nothing to do.  */
  size = TYPE_SIZE_UNIT (type);
  if (!size || TREE_CODE (size) == INTEGER_CST)
    return;

  /* Otherwise, make a WITH_SIZE_EXPR.  */
  size = unshare_expr (size);
  size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
  *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
}

/* Helper for gimplify_call_expr.  Gimplify a single argument *ARG_P.
   Store any side-effects in PRE_P.  CALL_LOCATION is the location of
   the CALL_EXPR.  */

enum gimplify_status
gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location)
{
  bool (*test) (tree);
  fallback_t fb;

  /* In general, we allow lvalues for function arguments to avoid
     extra overhead of copying large aggregates out of even larger
     aggregates into temporaries only to copy the temporaries to
     the argument list.  Make optimizers happy by pulling out to
     temporaries those types that fit in registers.
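
     For a hypothetical call f (s.x, big), the scalar s.x is thus
     forced into a temporary that is a register value, while the
     aggregate big may be passed as an lvalue directly rather than
     copied.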
*/ 2244 if (is_gimple_reg_type (TREE_TYPE (*arg_p))) 2245 test = is_gimple_val, fb = fb_rvalue; 2246 else 2247 { 2248 test = is_gimple_lvalue, fb = fb_either; 2249 /* Also strip a TARGET_EXPR that would force an extra copy. */ 2250 if (TREE_CODE (*arg_p) == TARGET_EXPR) 2251 { 2252 tree init = TARGET_EXPR_INITIAL (*arg_p); 2253 if (init 2254 && !VOID_TYPE_P (TREE_TYPE (init))) 2255 *arg_p = init; 2256 } 2257 } 2258 2259 /* If this is a variable sized type, we must remember the size. */ 2260 maybe_with_size_expr (arg_p); 2261 2262 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */ 2263 /* Make sure arguments have the same location as the function call 2264 itself. */ 2265 protected_set_expr_location (*arg_p, call_location); 2266 2267 /* There is a sequence point before a function call. Side effects in 2268 the argument list must occur before the actual call. So, when 2269 gimplifying arguments, force gimplify_expr to use an internal 2270 post queue which is then appended to the end of PRE_P. */ 2271 return gimplify_expr (arg_p, pre_p, NULL, test, fb); 2272 } 2273 2274 /* Don't fold inside offloading or taskreg regions: it can break code by 2275 adding decl references that weren't in the source. We'll do it during 2276 omplower pass instead. */ 2277 2278 static bool 2279 maybe_fold_stmt (gimple_stmt_iterator *gsi) 2280 { 2281 struct gimplify_omp_ctx *ctx; 2282 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context) 2283 if (ctx->region_type == ORT_TARGET 2284 || (ctx->region_type & (ORT_PARALLEL | ORT_TASK)) != 0) 2285 return false; 2286 return fold_stmt (gsi); 2287 } 2288 2289 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P. 2290 WANT_VALUE is true if the result of the call is desired. */ 2291 2292 static enum gimplify_status 2293 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value) 2294 { 2295 tree fndecl, parms, p, fnptrtype; 2296 enum gimplify_status ret; 2297 int i, nargs; 2298 gcall *call; 2299 bool builtin_va_start_p = false; 2300 location_t loc = EXPR_LOCATION (*expr_p); 2301 2302 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR); 2303 2304 /* For reliable diagnostics during inlining, it is necessary that 2305 every call_expr be annotated with file and line. */ 2306 if (! EXPR_HAS_LOCATION (*expr_p)) 2307 SET_EXPR_LOCATION (*expr_p, input_location); 2308 2309 /* Gimplify internal functions created in the FEs. */ 2310 if (CALL_EXPR_FN (*expr_p) == NULL_TREE) 2311 { 2312 if (want_value) 2313 return GS_ALL_DONE; 2314 2315 nargs = call_expr_nargs (*expr_p); 2316 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p); 2317 auto_vec<tree> vargs (nargs); 2318 2319 for (i = 0; i < nargs; i++) 2320 { 2321 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, 2322 EXPR_LOCATION (*expr_p)); 2323 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i)); 2324 } 2325 gimple call = gimple_build_call_internal_vec (ifn, vargs); 2326 gimplify_seq_add_stmt (pre_p, call); 2327 return GS_ALL_DONE; 2328 } 2329 2330 /* This may be a call to a builtin function. 2331 2332 Builtin function calls may be transformed into different 2333 (and more efficient) builtin function calls under certain 2334 circumstances. Unfortunately, gimplification can muck things 2335 up enough that the builtin expanders are not aware that certain 2336 transformations are still valid. 2337 2338 So we attempt transformation/gimplification of the call before 2339 we gimplify the CALL_EXPR. 
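     (A classic instance is the folding of a call such as
     printf ("%s\n", s) into puts (s) while the call is still in
     GENERIC form.)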
At this time we do not manage to 2340 transform all calls in the same manner as the expanders do, but 2341 we do transform most of them. */ 2342 fndecl = get_callee_fndecl (*expr_p); 2343 if (fndecl 2344 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL) 2345 switch (DECL_FUNCTION_CODE (fndecl)) 2346 { 2347 case BUILT_IN_VA_START: 2348 { 2349 builtin_va_start_p = TRUE; 2350 if (call_expr_nargs (*expr_p) < 2) 2351 { 2352 error ("too few arguments to function %<va_start%>"); 2353 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p)); 2354 return GS_OK; 2355 } 2356 2357 if (fold_builtin_next_arg (*expr_p, true)) 2358 { 2359 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p)); 2360 return GS_OK; 2361 } 2362 break; 2363 } 2364 case BUILT_IN_LINE: 2365 { 2366 *expr_p = build_int_cst (TREE_TYPE (*expr_p), 2367 LOCATION_LINE (EXPR_LOCATION (*expr_p))); 2368 return GS_OK; 2369 } 2370 case BUILT_IN_FILE: 2371 { 2372 const char *locfile = LOCATION_FILE (EXPR_LOCATION (*expr_p)); 2373 *expr_p = build_string_literal (strlen (locfile) + 1, locfile); 2374 return GS_OK; 2375 } 2376 case BUILT_IN_FUNCTION: 2377 { 2378 const char *function; 2379 function = IDENTIFIER_POINTER (DECL_NAME (current_function_decl)); 2380 *expr_p = build_string_literal (strlen (function) + 1, function); 2381 return GS_OK; 2382 } 2383 default: 2384 ; 2385 } 2386 if (fndecl && DECL_BUILT_IN (fndecl)) 2387 { 2388 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value); 2389 if (new_tree && new_tree != *expr_p) 2390 { 2391 /* There was a transformation of this call which computes the 2392 same value, but in a more efficient way. Return and try 2393 again. */ 2394 *expr_p = new_tree; 2395 return GS_OK; 2396 } 2397 } 2398 2399 /* Remember the original function pointer type. */ 2400 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p)); 2401 2402 /* There is a sequence point before the call, so any side effects in 2403 the calling expression must occur before the actual call. Force 2404 gimplify_expr to use an internal post queue. */ 2405 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL, 2406 is_gimple_call_addr, fb_rvalue); 2407 2408 nargs = call_expr_nargs (*expr_p); 2409 2410 /* Get argument types for verification. */ 2411 fndecl = get_callee_fndecl (*expr_p); 2412 parms = NULL_TREE; 2413 if (fndecl) 2414 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl)); 2415 else 2416 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype)); 2417 2418 if (fndecl && DECL_ARGUMENTS (fndecl)) 2419 p = DECL_ARGUMENTS (fndecl); 2420 else if (parms) 2421 p = parms; 2422 else 2423 p = NULL_TREE; 2424 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p)) 2425 ; 2426 2427 /* If the last argument is __builtin_va_arg_pack () and it is not 2428 passed as a named argument, decrease the number of CALL_EXPR 2429 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. 
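
     This is what lets an always_inline forwarding wrapper such as
     the hypothetical

       static inline __attribute__ ((always_inline)) int
       log_printf (const char *fmt, ...)
       {
         return fprintf (stderr, fmt, __builtin_va_arg_pack ());
       }

     expand into a direct fprintf call that passes the caller's
     variadic arguments through unchanged.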
*/ 2430 if (!p 2431 && i < nargs 2432 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR) 2433 { 2434 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1); 2435 tree last_arg_fndecl = get_callee_fndecl (last_arg); 2436 2437 if (last_arg_fndecl 2438 && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL 2439 && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL 2440 && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK) 2441 { 2442 tree call = *expr_p; 2443 2444 --nargs; 2445 *expr_p = build_call_array_loc (loc, TREE_TYPE (call), 2446 CALL_EXPR_FN (call), 2447 nargs, CALL_EXPR_ARGP (call)); 2448 2449 /* Copy all CALL_EXPR flags, location and block, except 2450 CALL_EXPR_VA_ARG_PACK flag. */ 2451 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call); 2452 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call); 2453 CALL_EXPR_RETURN_SLOT_OPT (*expr_p) 2454 = CALL_EXPR_RETURN_SLOT_OPT (call); 2455 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call); 2456 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call)); 2457 2458 /* Set CALL_EXPR_VA_ARG_PACK. */ 2459 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1; 2460 } 2461 } 2462 2463 /* Gimplify the function arguments. */ 2464 if (nargs > 0) 2465 { 2466 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0); 2467 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs; 2468 PUSH_ARGS_REVERSED ? i-- : i++) 2469 { 2470 enum gimplify_status t; 2471 2472 /* Avoid gimplifying the second argument to va_start, which needs to 2473 be the plain PARM_DECL. */ 2474 if ((i != 1) || !builtin_va_start_p) 2475 { 2476 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, 2477 EXPR_LOCATION (*expr_p)); 2478 2479 if (t == GS_ERROR) 2480 ret = GS_ERROR; 2481 } 2482 } 2483 } 2484 2485 /* Gimplify the static chain. */ 2486 if (CALL_EXPR_STATIC_CHAIN (*expr_p)) 2487 { 2488 if (fndecl && !DECL_STATIC_CHAIN (fndecl)) 2489 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL; 2490 else 2491 { 2492 enum gimplify_status t; 2493 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p, 2494 EXPR_LOCATION (*expr_p)); 2495 if (t == GS_ERROR) 2496 ret = GS_ERROR; 2497 } 2498 } 2499 2500 /* Verify the function result. */ 2501 if (want_value && fndecl 2502 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype)))) 2503 { 2504 error_at (loc, "using result of function returning %<void%>"); 2505 ret = GS_ERROR; 2506 } 2507 2508 /* Try this again in case gimplification exposed something. */ 2509 if (ret != GS_ERROR) 2510 { 2511 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value); 2512 2513 if (new_tree && new_tree != *expr_p) 2514 { 2515 /* There was a transformation of this call which computes the 2516 same value, but in a more efficient way. Return and try 2517 again. */ 2518 *expr_p = new_tree; 2519 return GS_OK; 2520 } 2521 } 2522 else 2523 { 2524 *expr_p = error_mark_node; 2525 return GS_ERROR; 2526 } 2527 2528 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its 2529 decl. This allows us to eliminate redundant or useless 2530 calls to "const" functions. */ 2531 if (TREE_CODE (*expr_p) == CALL_EXPR) 2532 { 2533 int flags = call_expr_flags (*expr_p); 2534 if (flags & (ECF_CONST | ECF_PURE) 2535 /* An infinite loop is considered a side effect. */ 2536 && !(flags & (ECF_LOOPING_CONST_OR_PURE))) 2537 TREE_SIDE_EFFECTS (*expr_p) = 0; 2538 } 2539 2540 /* If the value is not needed by the caller, emit a new GIMPLE_CALL 2541 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified 2542 form and delegate the creation of a GIMPLE_CALL to 2543 gimplify_modify_expr. 
This is always possible because when 2544 WANT_VALUE is true, the caller wants the result of this call into 2545 a temporary, which means that we will emit an INIT_EXPR in 2546 internal_get_tmp_var which will then be handled by 2547 gimplify_modify_expr. */ 2548 if (!want_value) 2549 { 2550 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we 2551 have to do is replicate it as a GIMPLE_CALL tuple. */ 2552 gimple_stmt_iterator gsi; 2553 call = gimple_build_call_from_tree (*expr_p); 2554 gimple_call_set_fntype (call, TREE_TYPE (fnptrtype)); 2555 notice_special_calls (call); 2556 gimplify_seq_add_stmt (pre_p, call); 2557 gsi = gsi_last (*pre_p); 2558 maybe_fold_stmt (&gsi); 2559 *expr_p = NULL_TREE; 2560 } 2561 else 2562 /* Remember the original function type. */ 2563 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype, 2564 CALL_EXPR_FN (*expr_p)); 2565 2566 return ret; 2567 } 2568 2569 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by 2570 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs. 2571 2572 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the 2573 condition is true or false, respectively. If null, we should generate 2574 our own to skip over the evaluation of this specific expression. 2575 2576 LOCUS is the source location of the COND_EXPR. 2577 2578 This function is the tree equivalent of do_jump. 2579 2580 shortcut_cond_r should only be called by shortcut_cond_expr. */ 2581 2582 static tree 2583 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p, 2584 location_t locus) 2585 { 2586 tree local_label = NULL_TREE; 2587 tree t, expr = NULL; 2588 2589 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to 2590 retain the shortcut semantics. Just insert the gotos here; 2591 shortcut_cond_expr will append the real blocks later. */ 2592 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR) 2593 { 2594 location_t new_locus; 2595 2596 /* Turn if (a && b) into 2597 2598 if (a); else goto no; 2599 if (b) goto yes; else goto no; 2600 (no:) */ 2601 2602 if (false_label_p == NULL) 2603 false_label_p = &local_label; 2604 2605 /* Keep the original source location on the first 'if'. */ 2606 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus); 2607 append_to_statement_list (t, &expr); 2608 2609 /* Set the source location of the && on the second 'if'. */ 2610 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus; 2611 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p, 2612 new_locus); 2613 append_to_statement_list (t, &expr); 2614 } 2615 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR) 2616 { 2617 location_t new_locus; 2618 2619 /* Turn if (a || b) into 2620 2621 if (a) goto yes; 2622 if (b) goto yes; else goto no; 2623 (yes:) */ 2624 2625 if (true_label_p == NULL) 2626 true_label_p = &local_label; 2627 2628 /* Keep the original source location on the first 'if'. */ 2629 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus); 2630 append_to_statement_list (t, &expr); 2631 2632 /* Set the source location of the || on the second 'if'. */ 2633 new_locus = EXPR_HAS_LOCATION (pred) ? 
EXPR_LOCATION (pred) : locus; 2634 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p, 2635 new_locus); 2636 append_to_statement_list (t, &expr); 2637 } 2638 else if (TREE_CODE (pred) == COND_EXPR 2639 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1))) 2640 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2)))) 2641 { 2642 location_t new_locus; 2643 2644 /* As long as we're messing with gotos, turn if (a ? b : c) into 2645 if (a) 2646 if (b) goto yes; else goto no; 2647 else 2648 if (c) goto yes; else goto no; 2649 2650 Don't do this if one of the arms has void type, which can happen 2651 in C++ when the arm is throw. */ 2652 2653 /* Keep the original source location on the first 'if'. Set the source 2654 location of the ? on the second 'if'. */ 2655 new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus; 2656 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0), 2657 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, 2658 false_label_p, locus), 2659 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p, 2660 false_label_p, new_locus)); 2661 } 2662 else 2663 { 2664 expr = build3 (COND_EXPR, void_type_node, pred, 2665 build_and_jump (true_label_p), 2666 build_and_jump (false_label_p)); 2667 SET_EXPR_LOCATION (expr, locus); 2668 } 2669 2670 if (local_label) 2671 { 2672 t = build1 (LABEL_EXPR, void_type_node, local_label); 2673 append_to_statement_list (t, &expr); 2674 } 2675 2676 return expr; 2677 } 2678 2679 /* Given a conditional expression EXPR with short-circuit boolean 2680 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the 2681 predicate apart into the equivalent sequence of conditionals. */ 2682 2683 static tree 2684 shortcut_cond_expr (tree expr) 2685 { 2686 tree pred = TREE_OPERAND (expr, 0); 2687 tree then_ = TREE_OPERAND (expr, 1); 2688 tree else_ = TREE_OPERAND (expr, 2); 2689 tree true_label, false_label, end_label, t; 2690 tree *true_label_p; 2691 tree *false_label_p; 2692 bool emit_end, emit_false, jump_over_else; 2693 bool then_se = then_ && TREE_SIDE_EFFECTS (then_); 2694 bool else_se = else_ && TREE_SIDE_EFFECTS (else_); 2695 2696 /* First do simple transformations. */ 2697 if (!else_se) 2698 { 2699 /* If there is no 'else', turn 2700 if (a && b) then c 2701 into 2702 if (a) if (b) then c. */ 2703 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR) 2704 { 2705 /* Keep the original source location on the first 'if'. */ 2706 location_t locus = EXPR_LOC_OR_LOC (expr, input_location); 2707 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1); 2708 /* Set the source location of the && on the second 'if'. */ 2709 if (EXPR_HAS_LOCATION (pred)) 2710 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred)); 2711 then_ = shortcut_cond_expr (expr); 2712 then_se = then_ && TREE_SIDE_EFFECTS (then_); 2713 pred = TREE_OPERAND (pred, 0); 2714 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE); 2715 SET_EXPR_LOCATION (expr, locus); 2716 } 2717 } 2718 2719 if (!then_se) 2720 { 2721 /* If there is no 'then', turn 2722 if (a || b); else d 2723 into 2724 if (a); else if (b); else d. */ 2725 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR) 2726 { 2727 /* Keep the original source location on the first 'if'. */ 2728 location_t locus = EXPR_LOC_OR_LOC (expr, input_location); 2729 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1); 2730 /* Set the source location of the || on the second 'if'. 
*/ 2731 if (EXPR_HAS_LOCATION (pred)) 2732 SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred)); 2733 else_ = shortcut_cond_expr (expr); 2734 else_se = else_ && TREE_SIDE_EFFECTS (else_); 2735 pred = TREE_OPERAND (pred, 0); 2736 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_); 2737 SET_EXPR_LOCATION (expr, locus); 2738 } 2739 } 2740 2741 /* If we're done, great. */ 2742 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR 2743 && TREE_CODE (pred) != TRUTH_ORIF_EXPR) 2744 return expr; 2745 2746 /* Otherwise we need to mess with gotos. Change 2747 if (a) c; else d; 2748 to 2749 if (a); else goto no; 2750 c; goto end; 2751 no: d; end: 2752 and recursively gimplify the condition. */ 2753 2754 true_label = false_label = end_label = NULL_TREE; 2755 2756 /* If our arms just jump somewhere, hijack those labels so we don't 2757 generate jumps to jumps. */ 2758 2759 if (then_ 2760 && TREE_CODE (then_) == GOTO_EXPR 2761 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL) 2762 { 2763 true_label = GOTO_DESTINATION (then_); 2764 then_ = NULL; 2765 then_se = false; 2766 } 2767 2768 if (else_ 2769 && TREE_CODE (else_) == GOTO_EXPR 2770 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL) 2771 { 2772 false_label = GOTO_DESTINATION (else_); 2773 else_ = NULL; 2774 else_se = false; 2775 } 2776 2777 /* If we aren't hijacking a label for the 'then' branch, it falls through. */ 2778 if (true_label) 2779 true_label_p = &true_label; 2780 else 2781 true_label_p = NULL; 2782 2783 /* The 'else' branch also needs a label if it contains interesting code. */ 2784 if (false_label || else_se) 2785 false_label_p = &false_label; 2786 else 2787 false_label_p = NULL; 2788 2789 /* If there was nothing else in our arms, just forward the label(s). */ 2790 if (!then_se && !else_se) 2791 return shortcut_cond_r (pred, true_label_p, false_label_p, 2792 EXPR_LOC_OR_LOC (expr, input_location)); 2793 2794 /* If our last subexpression already has a terminal label, reuse it. */ 2795 if (else_se) 2796 t = expr_last (else_); 2797 else if (then_se) 2798 t = expr_last (then_); 2799 else 2800 t = NULL; 2801 if (t && TREE_CODE (t) == LABEL_EXPR) 2802 end_label = LABEL_EXPR_LABEL (t); 2803 2804 /* If we don't care about jumping to the 'else' branch, jump to the end 2805 if the condition is false. */ 2806 if (!false_label_p) 2807 false_label_p = &end_label; 2808 2809 /* We only want to emit these labels if we aren't hijacking them. */ 2810 emit_end = (end_label == NULL_TREE); 2811 emit_false = (false_label == NULL_TREE); 2812 2813 /* We only emit the jump over the else clause if we have to--if the 2814 then clause may fall through. Otherwise we can wind up with a 2815 useless jump and a useless label at the end of gimplified code, 2816 which will cause us to think that this conditional as a whole 2817 falls through even if it doesn't. If we then inline a function 2818 which ends with such a condition, that can cause us to issue an 2819 inappropriate warning about control reaching the end of a 2820 non-void function. 
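
     For instance, when the then clause ends in a return or goto,
     block_may_fallthru returns false and the jump over the else
     clause is omitted.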
*/
  jump_over_else = block_may_fallthru (then_);

  pred = shortcut_cond_r (pred, true_label_p, false_label_p,
                          EXPR_LOC_OR_LOC (expr, input_location));

  expr = NULL;
  append_to_statement_list (pred, &expr);

  append_to_statement_list (then_, &expr);
  if (else_se)
    {
      if (jump_over_else)
        {
          tree last = expr_last (expr);
          t = build_and_jump (&end_label);
          if (EXPR_HAS_LOCATION (last))
            SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
          append_to_statement_list (t, &expr);
        }
      if (emit_false)
        {
          t = build1 (LABEL_EXPR, void_type_node, false_label);
          append_to_statement_list (t, &expr);
        }
      append_to_statement_list (else_, &expr);
    }
  if (emit_end && end_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, end_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}

/* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE.  */

tree
gimple_boolify (tree expr)
{
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);

  if (TREE_CODE (expr) == NE_EXPR
      && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
      && integer_zerop (TREE_OPERAND (expr, 1)))
    {
      tree call = TREE_OPERAND (expr, 0);
      tree fn = get_callee_fndecl (call);

      /* For __builtin_expect ((long) (x), y) recurse into x as well
         if x is truth_value_p.  */
      if (fn
          && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
          && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
          && call_expr_nargs (call) == 2)
        {
          tree arg = CALL_EXPR_ARG (call, 0);
          if (arg)
            {
              if (TREE_CODE (arg) == NOP_EXPR
                  && TREE_TYPE (arg) == TREE_TYPE (call))
                arg = TREE_OPERAND (arg, 0);
              if (truth_value_p (TREE_CODE (arg)))
                {
                  arg = gimple_boolify (arg);
                  CALL_EXPR_ARG (call, 0)
                    = fold_convert_loc (loc, TREE_TYPE (call), arg);
                }
            }
        }
    }

  switch (TREE_CODE (expr))
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Also boolify the arguments of truth exprs.  */
      TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
      /* FALLTHRU */

    case TRUTH_NOT_EXPR:
      TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

      /* These expressions always produce boolean results.  */
      if (TREE_CODE (type) != BOOLEAN_TYPE)
        TREE_TYPE (expr) = boolean_type_node;
      return expr;

    case ANNOTATE_EXPR:
      switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
        {
        case annot_expr_ivdep_kind:
        case annot_expr_no_vector_kind:
        case annot_expr_vector_kind:
          TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
          if (TREE_CODE (type) != BOOLEAN_TYPE)
            TREE_TYPE (expr) = boolean_type_node;
          return expr;
        default:
          gcc_unreachable ();
        }

    default:
      if (COMPARISON_CLASS_P (expr))
        {
          /* These expressions always produce boolean results.  */
          if (TREE_CODE (type) != BOOLEAN_TYPE)
            TREE_TYPE (expr) = boolean_type_node;
          return expr;
        }
      /* Other expressions that get here must have boolean values, but
         might need to be converted to the appropriate mode.
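         E.g. a plain 'int' flag used as a condition is wrapped in a
         conversion to boolean_type_node below.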
*/
      if (TREE_CODE (type) == BOOLEAN_TYPE)
        return expr;
      return fold_convert_loc (loc, boolean_type_node, expr);
    }
}

/* Given a conditional expression *EXPR_P without side effects, gimplify
   its operands.  New statements are inserted to PRE_P.  */

static enum gimplify_status
gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p, cond;
  enum gimplify_status ret, tret;
  enum tree_code code;

  cond = gimple_boolify (COND_EXPR_COND (expr));

  /* We need to handle && and || specially, as their gimplification
     creates a pure COND_EXPR, thus leading to an infinite cycle
     otherwise.  */
  code = TREE_CODE (cond);
  if (code == TRUTH_ANDIF_EXPR)
    TREE_SET_CODE (cond, TRUTH_AND_EXPR);
  else if (code == TRUTH_ORIF_EXPR)
    TREE_SET_CODE (cond, TRUTH_OR_EXPR);
  ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
  COND_EXPR_COND (*expr_p) = cond;

  tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
                        is_gimple_val, fb_rvalue);
  ret = MIN (ret, tret);
  tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
                        is_gimple_val, fb_rvalue);

  return MIN (ret, tret);
}

/* Return true if evaluating EXPR could trap.
   EXPR is GENERIC, while tree_could_trap_p can be called
   only on GIMPLE.  */

static bool
generic_expr_could_trap_p (tree expr)
{
  unsigned i, n;

  if (!expr || is_gimple_val (expr))
    return false;

  if (!EXPR_P (expr) || tree_could_trap_p (expr))
    return true;

  n = TREE_OPERAND_LENGTH (expr);
  for (i = 0; i < n; i++)
    if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
      return true;

  return false;
}

/* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
   into

     if (p)                     if (p)
       t1 = a;                    a;
     else            or         else
       t1 = b;                    b;
     t1;

   The second form is used when *EXPR_P is of type void.

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.  */

static enum gimplify_status
gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
{
  tree expr = *expr_p;
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);
  tree tmp, arm1, arm2;
  enum gimplify_status ret;
  tree label_true, label_false, label_cont;
  bool have_then_clause_p, have_else_clause_p;
  gcond *cond_stmt;
  enum tree_code pred_code;
  gimple_seq seq = NULL;

  /* If this COND_EXPR has a value, copy the values into a temporary within
     the arms.  */
  if (!VOID_TYPE_P (type))
    {
      tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
      tree result;

      /* If either an rvalue is ok or we do not require an lvalue, create the
         temporary.  But we cannot do that if the type is addressable.  */
      if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
          && !TREE_ADDRESSABLE (type))
        {
          if (gimplify_ctxp->allow_rhs_cond_expr
              /* If either branch has side effects or could trap, it can't be
                 evaluated unconditionally.
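                 E.g. in a hypothetical 'p ? *p : 0' the dereference
                 may trap when p is null, so both arms must stay
                 guarded by the branch.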
*/ 3040 && !TREE_SIDE_EFFECTS (then_) 3041 && !generic_expr_could_trap_p (then_) 3042 && !TREE_SIDE_EFFECTS (else_) 3043 && !generic_expr_could_trap_p (else_)) 3044 return gimplify_pure_cond_expr (expr_p, pre_p); 3045 3046 tmp = create_tmp_var (type, "iftmp"); 3047 result = tmp; 3048 } 3049 3050 /* Otherwise, only create and copy references to the values. */ 3051 else 3052 { 3053 type = build_pointer_type (type); 3054 3055 if (!VOID_TYPE_P (TREE_TYPE (then_))) 3056 then_ = build_fold_addr_expr_loc (loc, then_); 3057 3058 if (!VOID_TYPE_P (TREE_TYPE (else_))) 3059 else_ = build_fold_addr_expr_loc (loc, else_); 3060 3061 expr 3062 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_); 3063 3064 tmp = create_tmp_var (type, "iftmp"); 3065 result = build_simple_mem_ref_loc (loc, tmp); 3066 } 3067 3068 /* Build the new then clause, `tmp = then_;'. But don't build the 3069 assignment if the value is void; in C++ it can be if it's a throw. */ 3070 if (!VOID_TYPE_P (TREE_TYPE (then_))) 3071 TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_); 3072 3073 /* Similarly, build the new else clause, `tmp = else_;'. */ 3074 if (!VOID_TYPE_P (TREE_TYPE (else_))) 3075 TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_); 3076 3077 TREE_TYPE (expr) = void_type_node; 3078 recalculate_side_effects (expr); 3079 3080 /* Move the COND_EXPR to the prequeue. */ 3081 gimplify_stmt (&expr, pre_p); 3082 3083 *expr_p = result; 3084 return GS_ALL_DONE; 3085 } 3086 3087 /* Remove any COMPOUND_EXPR so the following cases will be caught. */ 3088 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0)); 3089 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR) 3090 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true); 3091 3092 /* Make sure the condition has BOOLEAN_TYPE. */ 3093 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0)); 3094 3095 /* Break apart && and || conditions. */ 3096 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR 3097 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR) 3098 { 3099 expr = shortcut_cond_expr (expr); 3100 3101 if (expr != *expr_p) 3102 { 3103 *expr_p = expr; 3104 3105 /* We can't rely on gimplify_expr to re-gimplify the expanded 3106 form properly, as cleanups might cause the target labels to be 3107 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to 3108 set up a conditional context. */ 3109 gimple_push_condition (); 3110 gimplify_stmt (expr_p, &seq); 3111 gimple_pop_condition (pre_p); 3112 gimple_seq_add_seq (pre_p, seq); 3113 3114 return GS_ALL_DONE; 3115 } 3116 } 3117 3118 /* Now do the normal gimplification. */ 3119 3120 /* Gimplify condition. */ 3121 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr, 3122 fb_rvalue); 3123 if (ret == GS_ERROR) 3124 return GS_ERROR; 3125 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE); 3126 3127 gimple_push_condition (); 3128 3129 have_then_clause_p = have_else_clause_p = false; 3130 if (TREE_OPERAND (expr, 1) != NULL 3131 && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR 3132 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL 3133 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) 3134 == current_function_decl) 3135 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR 3136 have different locations, otherwise we end up with incorrect 3137 location information on the branches. 
*/ 3138 && (optimize 3139 || !EXPR_HAS_LOCATION (expr) 3140 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1)) 3141 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1)))) 3142 { 3143 label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1)); 3144 have_then_clause_p = true; 3145 } 3146 else 3147 label_true = create_artificial_label (UNKNOWN_LOCATION); 3148 if (TREE_OPERAND (expr, 2) != NULL 3149 && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR 3150 && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL 3151 && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) 3152 == current_function_decl) 3153 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR 3154 have different locations, otherwise we end up with incorrect 3155 location information on the branches. */ 3156 && (optimize 3157 || !EXPR_HAS_LOCATION (expr) 3158 || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2)) 3159 || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2)))) 3160 { 3161 label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2)); 3162 have_else_clause_p = true; 3163 } 3164 else 3165 label_false = create_artificial_label (UNKNOWN_LOCATION); 3166 3167 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1, 3168 &arm2); 3169 3170 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, 3171 label_false); 3172 3173 gimplify_seq_add_stmt (&seq, cond_stmt); 3174 label_cont = NULL_TREE; 3175 if (!have_then_clause_p) 3176 { 3177 /* For if (...) {} else { code; } put label_true after 3178 the else block. */ 3179 if (TREE_OPERAND (expr, 1) == NULL_TREE 3180 && !have_else_clause_p 3181 && TREE_OPERAND (expr, 2) != NULL_TREE) 3182 label_cont = label_true; 3183 else 3184 { 3185 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true)); 3186 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq); 3187 /* For if (...) { code; } else {} or 3188 if (...) { code; } else goto label; or 3189 if (...) { code; return; } else { ... } 3190 label_cont isn't needed. */ 3191 if (!have_else_clause_p 3192 && TREE_OPERAND (expr, 2) != NULL_TREE 3193 && gimple_seq_may_fallthru (seq)) 3194 { 3195 gimple g; 3196 label_cont = create_artificial_label (UNKNOWN_LOCATION); 3197 3198 g = gimple_build_goto (label_cont); 3199 3200 /* GIMPLE_COND's are very low level; they have embedded 3201 gotos. This particular embedded goto should not be marked 3202 with the location of the original COND_EXPR, as it would 3203 correspond to the COND_EXPR's condition, not the ELSE or the 3204 THEN arms. To avoid marking it with the wrong location, flag 3205 it as "no location". */ 3206 gimple_set_do_not_emit_location (g); 3207 3208 gimplify_seq_add_stmt (&seq, g); 3209 } 3210 } 3211 } 3212 if (!have_else_clause_p) 3213 { 3214 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false)); 3215 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq); 3216 } 3217 if (label_cont) 3218 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont)); 3219 3220 gimple_pop_condition (pre_p); 3221 gimple_seq_add_seq (pre_p, seq); 3222 3223 if (ret == GS_ERROR) 3224 ; /* Do nothing. */ 3225 else if (have_then_clause_p || have_else_clause_p) 3226 ret = GS_ALL_DONE; 3227 else 3228 { 3229 /* Both arms are empty; replace the COND_EXPR with its predicate. 
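         E.g. 'if (f ()) ; else ;' reduces to evaluating f () for
         its side effects alone.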
*/ 3230 expr = TREE_OPERAND (expr, 0); 3231 gimplify_stmt (&expr, pre_p); 3232 } 3233 3234 *expr_p = NULL; 3235 return ret; 3236 } 3237 3238 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression, 3239 to be marked addressable. 3240 3241 We cannot rely on such an expression being directly markable if a temporary 3242 has been created by the gimplification. In this case, we create another 3243 temporary and initialize it with a copy, which will become a store after we 3244 mark it addressable. This can happen if the front-end passed us something 3245 that it could not mark addressable yet, like a Fortran pass-by-reference 3246 parameter (int) floatvar. */ 3247 3248 static void 3249 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p) 3250 { 3251 while (handled_component_p (*expr_p)) 3252 expr_p = &TREE_OPERAND (*expr_p, 0); 3253 if (is_gimple_reg (*expr_p)) 3254 { 3255 tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL); 3256 DECL_GIMPLE_REG_P (var) = 0; 3257 *expr_p = var; 3258 } 3259 } 3260 3261 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with 3262 a call to __builtin_memcpy. */ 3263 3264 static enum gimplify_status 3265 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value, 3266 gimple_seq *seq_p) 3267 { 3268 tree t, to, to_ptr, from, from_ptr; 3269 gcall *gs; 3270 location_t loc = EXPR_LOCATION (*expr_p); 3271 3272 to = TREE_OPERAND (*expr_p, 0); 3273 from = TREE_OPERAND (*expr_p, 1); 3274 3275 /* Mark the RHS addressable. Beware that it may not be possible to do so 3276 directly if a temporary has been created by the gimplification. */ 3277 prepare_gimple_addressable (&from, seq_p); 3278 3279 mark_addressable (from); 3280 from_ptr = build_fold_addr_expr_loc (loc, from); 3281 gimplify_arg (&from_ptr, seq_p, loc); 3282 3283 mark_addressable (to); 3284 to_ptr = build_fold_addr_expr_loc (loc, to); 3285 gimplify_arg (&to_ptr, seq_p, loc); 3286 3287 t = builtin_decl_implicit (BUILT_IN_MEMCPY); 3288 3289 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size); 3290 3291 if (want_value) 3292 { 3293 /* tmp = memcpy() */ 3294 t = create_tmp_var (TREE_TYPE (to_ptr)); 3295 gimple_call_set_lhs (gs, t); 3296 gimplify_seq_add_stmt (seq_p, gs); 3297 3298 *expr_p = build_simple_mem_ref (t); 3299 return GS_ALL_DONE; 3300 } 3301 3302 gimplify_seq_add_stmt (seq_p, gs); 3303 *expr_p = NULL; 3304 return GS_ALL_DONE; 3305 } 3306 3307 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with 3308 a call to __builtin_memset. In this case we know that the RHS is 3309 a CONSTRUCTOR with an empty element list. */ 3310 3311 static enum gimplify_status 3312 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value, 3313 gimple_seq *seq_p) 3314 { 3315 tree t, from, to, to_ptr; 3316 gcall *gs; 3317 location_t loc = EXPR_LOCATION (*expr_p); 3318 3319 /* Assert our assumptions, to abort instead of producing wrong code 3320 silently if they are not met. Beware that the RHS CONSTRUCTOR might 3321 not be immediately exposed. */ 3322 from = TREE_OPERAND (*expr_p, 1); 3323 if (TREE_CODE (from) == WITH_SIZE_EXPR) 3324 from = TREE_OPERAND (from, 0); 3325 3326 gcc_assert (TREE_CODE (from) == CONSTRUCTOR 3327 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from))); 3328 3329 /* Now proceed. 
*/ 3330 to = TREE_OPERAND (*expr_p, 0); 3331 3332 to_ptr = build_fold_addr_expr_loc (loc, to); 3333 gimplify_arg (&to_ptr, seq_p, loc); 3334 t = builtin_decl_implicit (BUILT_IN_MEMSET); 3335 3336 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size); 3337 3338 if (want_value) 3339 { 3340 /* tmp = memset() */ 3341 t = create_tmp_var (TREE_TYPE (to_ptr)); 3342 gimple_call_set_lhs (gs, t); 3343 gimplify_seq_add_stmt (seq_p, gs); 3344 3345 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t); 3346 return GS_ALL_DONE; 3347 } 3348 3349 gimplify_seq_add_stmt (seq_p, gs); 3350 *expr_p = NULL; 3351 return GS_ALL_DONE; 3352 } 3353 3354 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree, 3355 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an 3356 assignment. Return non-null if we detect a potential overlap. */ 3357 3358 struct gimplify_init_ctor_preeval_data 3359 { 3360 /* The base decl of the lhs object. May be NULL, in which case we 3361 have to assume the lhs is indirect. */ 3362 tree lhs_base_decl; 3363 3364 /* The alias set of the lhs object. */ 3365 alias_set_type lhs_alias_set; 3366 }; 3367 3368 static tree 3369 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata) 3370 { 3371 struct gimplify_init_ctor_preeval_data *data 3372 = (struct gimplify_init_ctor_preeval_data *) xdata; 3373 tree t = *tp; 3374 3375 /* If we find the base object, obviously we have overlap. */ 3376 if (data->lhs_base_decl == t) 3377 return t; 3378 3379 /* If the constructor component is indirect, determine if we have a 3380 potential overlap with the lhs. The only bits of information we 3381 have to go on at this point are addressability and alias sets. */ 3382 if ((INDIRECT_REF_P (t) 3383 || TREE_CODE (t) == MEM_REF) 3384 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl)) 3385 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t))) 3386 return t; 3387 3388 /* If the constructor component is a call, determine if it can hide a 3389 potential overlap with the lhs through an INDIRECT_REF like above. 3390 ??? Ugh - this is completely broken. In fact this whole analysis 3391 doesn't look conservative. */ 3392 if (TREE_CODE (t) == CALL_EXPR) 3393 { 3394 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t))); 3395 3396 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type)) 3397 if (POINTER_TYPE_P (TREE_VALUE (type)) 3398 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl)) 3399 && alias_sets_conflict_p (data->lhs_alias_set, 3400 get_alias_set 3401 (TREE_TYPE (TREE_VALUE (type))))) 3402 return t; 3403 } 3404 3405 if (IS_TYPE_OR_DECL_P (t)) 3406 *walk_subtrees = 0; 3407 return NULL; 3408 } 3409 3410 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR, 3411 force values that overlap with the lhs (as described by *DATA) 3412 into temporaries. */ 3413 3414 static void 3415 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p, 3416 struct gimplify_init_ctor_preeval_data *data) 3417 { 3418 enum gimplify_status one; 3419 3420 /* If the value is constant, then there's nothing to pre-evaluate. */ 3421 if (TREE_CONSTANT (*expr_p)) 3422 { 3423 /* Ensure it does not have side effects, it might contain a reference to 3424 the object we're initializing. */ 3425 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p)); 3426 return; 3427 } 3428 3429 /* If the type has non-trivial constructors, we can't pre-evaluate. 
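     (For C++ class types the front end sets TREE_ADDRESSABLE on the
     type when copying must go through a constructor, so a raw
     bitwise copy into a temporary would be wrong here.)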
*/
  if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
    return;

  /* Recurse for nested constructors.  */
  if (TREE_CODE (*expr_p) == CONSTRUCTOR)
    {
      unsigned HOST_WIDE_INT ix;
      constructor_elt *ce;
      vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);

      FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
        gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);

      return;
    }

  /* If this is a variable sized type, we must remember the size.  */
  maybe_with_size_expr (expr_p);

  /* Gimplify the constructor element to something appropriate for the rhs
     of a MODIFY_EXPR.  Given that we know the LHS is an aggregate, we know
     the gimplifier will consider this a store to memory.  Doing this
     gimplification now means that we won't have to deal with complicated
     language-specific trees, nor trees like SAVE_EXPR that can induce
     exponential search behavior.  */
  one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
  if (one == GS_ERROR)
    {
      *expr_p = NULL;
      return;
    }

  /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
     with the lhs, since "a = { .x=a }" doesn't make sense.  This will
     always be true for all scalars, since is_gimple_mem_rhs insists on a
     temporary variable for them.  */
  if (DECL_P (*expr_p))
    return;

  /* If this is of variable size, we have no choice but to assume it doesn't
     overlap since we can't make a temporary for it.  */
  if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
    return;

  /* Otherwise, we must search for overlap ...  */
  if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
    return;

  /* ... and if found, force the value into a temporary.  */
  *expr_p = get_formal_tmp_var (*expr_p, pre_p);
}

/* A subroutine of gimplify_init_ctor_eval.  Create a loop for
   a RANGE_EXPR in a CONSTRUCTOR for an array.

       var = lower;
     loop_entry:
       object[var] = value;
       if (var == upper)
         goto loop_exit;
       var = var + 1;
       goto loop_entry;
     loop_exit:

   We increment var _after_ the loop exit check because we might otherwise
   fail if upper == TYPE_MAX_VALUE (type for upper).

   Note that we never have to deal with SAVE_EXPRs here, because this has
   already been taken care of for us, in gimplify_init_ctor_preeval ().  */

static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
                                     gimple_seq *, bool);

static void
gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
                               tree value, tree array_elt_type,
                               gimple_seq *pre_p, bool cleared)
{
  tree loop_entry_label, loop_exit_label, fall_thru_label;
  tree var, var_type, cref, tmp;

  loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
  loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
  fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);

  /* Create and initialize the index variable.  */
  var_type = TREE_TYPE (upper);
  var = create_tmp_var (var_type);
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));

  /* Add the loop entry label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));

  /* Build the reference.
*/
  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
                 var, NULL_TREE, NULL_TREE);

  /* If the value is a constructor, just call gimplify_init_ctor_eval to do
     the store.  Otherwise just assign value to the reference.  */

  if (TREE_CODE (value) == CONSTRUCTOR)
    /* NB we might have to call ourselves recursively through
       gimplify_init_ctor_eval if the value is a constructor.  */
    gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
                             pre_p, cleared);
  else
    gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));

  /* We exit the loop when the index var is equal to the upper bound.  */
  gimplify_seq_add_stmt (pre_p,
                         gimple_build_cond (EQ_EXPR, var, upper,
                                            loop_exit_label, fall_thru_label));

  gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));

  /* Otherwise, increment the index var...  */
  tmp = build2 (PLUS_EXPR, var_type, var,
                fold_convert (var_type, integer_one_node));
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));

  /* ...and jump back to the loop entry.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));

  /* Add the loop exit label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
}

/* Return true if FDECL is accessing a field that is zero sized.  */

static bool
zero_sized_field_decl (const_tree fdecl)
{
  if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
      && integer_zerop (DECL_SIZE (fdecl)))
    return true;
  return false;
}

/* Return true if TYPE is zero sized.  */

static bool
zero_sized_type (const_tree type)
{
  if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
      && integer_zerop (TYPE_SIZE (type)))
    return true;
  return false;
}

/* A subroutine of gimplify_init_constructor.  Generate individual
   MODIFY_EXPRs for a CONSTRUCTOR.  OBJECT is the LHS against which the
   assignments should happen.  ELTS is the CONSTRUCTOR_ELTS of the
   CONSTRUCTOR.  CLEARED is true if the entire LHS object has been
   zeroed first.  */

static void
gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
                         gimple_seq *pre_p, bool cleared)
{
  tree array_elt_type = NULL;
  unsigned HOST_WIDE_INT ix;
  tree purpose, value;

  if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
    array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));

  FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
    {
      tree cref;

      /* NULL values are created above for gimplification errors.  */
      if (value == NULL)
        continue;

      if (cleared && initializer_zerop (value))
        continue;

      /* ??? Here's to hoping the front end fills in all of the indices,
         so we don't have to figure out what's missing ourselves.  */
      gcc_assert (purpose);

      /* Skip zero-sized fields, unless value has side-effects.  This can
         happen with calls to functions returning a zero-sized type, which
         we shouldn't discard.  As a number of downstream passes don't
         expect sets of zero-sized fields, we rely on the gimplification of
         the MODIFY_EXPR we make below to drop the assignment statement.  */
      if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
        continue;

      /* If we have a RANGE_EXPR, we have to build a loop to assign the
         whole range.
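
         RANGE_EXPRs come from GNU C range designators, e.g.

           int a[10] = { [2 ... 5] = 7 };

         which is lowered through the loop built by
         gimplify_init_ctor_eval_range above.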
*/ 3621 if (TREE_CODE (purpose) == RANGE_EXPR) 3622 { 3623 tree lower = TREE_OPERAND (purpose, 0); 3624 tree upper = TREE_OPERAND (purpose, 1); 3625 3626 /* If the lower bound is equal to upper, just treat it as if 3627 upper was the index. */ 3628 if (simple_cst_equal (lower, upper)) 3629 purpose = upper; 3630 else 3631 { 3632 gimplify_init_ctor_eval_range (object, lower, upper, value, 3633 array_elt_type, pre_p, cleared); 3634 continue; 3635 } 3636 } 3637 3638 if (array_elt_type) 3639 { 3640 /* Do not use bitsizetype for ARRAY_REF indices. */ 3641 if (TYPE_DOMAIN (TREE_TYPE (object))) 3642 purpose 3643 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))), 3644 purpose); 3645 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object), 3646 purpose, NULL_TREE, NULL_TREE); 3647 } 3648 else 3649 { 3650 gcc_assert (TREE_CODE (purpose) == FIELD_DECL); 3651 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose), 3652 unshare_expr (object), purpose, NULL_TREE); 3653 } 3654 3655 if (TREE_CODE (value) == CONSTRUCTOR 3656 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE) 3657 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value), 3658 pre_p, cleared); 3659 else 3660 { 3661 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value); 3662 gimplify_and_add (init, pre_p); 3663 ggc_free (init); 3664 } 3665 } 3666 } 3667 3668 /* Return the appropriate RHS predicate for this LHS. */ 3669 3670 gimple_predicate 3671 rhs_predicate_for (tree lhs) 3672 { 3673 if (is_gimple_reg (lhs)) 3674 return is_gimple_reg_rhs_or_call; 3675 else 3676 return is_gimple_mem_rhs_or_call; 3677 } 3678 3679 /* Gimplify a C99 compound literal expression. This just means adding 3680 the DECL_EXPR before the current statement and using its anonymous 3681 decl instead. */ 3682 3683 static enum gimplify_status 3684 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p, 3685 bool (*gimple_test_f) (tree), 3686 fallback_t fallback) 3687 { 3688 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p); 3689 tree decl = DECL_EXPR_DECL (decl_s); 3690 tree init = DECL_INITIAL (decl); 3691 /* Mark the decl as addressable if the compound literal 3692 expression is addressable now, otherwise it is marked too late 3693 after we gimplify the initialization expression. */ 3694 if (TREE_ADDRESSABLE (*expr_p)) 3695 TREE_ADDRESSABLE (decl) = 1; 3696 /* Otherwise, if we don't need an lvalue and have a literal directly 3697 substitute it. Check if it matches the gimple predicate, as 3698 otherwise we'd generate a new temporary, and we can as well just 3699 use the decl we already have. */ 3700 else if (!TREE_ADDRESSABLE (decl) 3701 && init 3702 && (fallback & fb_lvalue) == 0 3703 && gimple_test_f (init)) 3704 { 3705 *expr_p = init; 3706 return GS_OK; 3707 } 3708 3709 /* Preliminarily mark non-addressed complex variables as eligible 3710 for promotion to gimple registers. We'll transform their uses 3711 as we find them. */ 3712 if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE 3713 || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE) 3714 && !TREE_THIS_VOLATILE (decl) 3715 && !needs_to_live_in_memory (decl)) 3716 DECL_GIMPLE_REG_P (decl) = 1; 3717 3718 /* If the decl is not addressable, then it is being used in some 3719 expression or on the right hand side of a statement, and it can 3720 be put into a readonly data section. */ 3721 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0) 3722 TREE_READONLY (decl) = 1; 3723 3724 /* This decl isn't mentioned in the enclosing block, so add it to the 3725 list of temps. 
FIXME it seems a bit of a kludge to say that 3726 anonymous artificial vars aren't pushed, but everything else is. */ 3727 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl)) 3728 gimple_add_tmp_var (decl); 3729 3730 gimplify_and_add (decl_s, pre_p); 3731 *expr_p = decl; 3732 return GS_OK; 3733 } 3734 3735 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR, 3736 return a new CONSTRUCTOR if something changed. */ 3737 3738 static tree 3739 optimize_compound_literals_in_ctor (tree orig_ctor) 3740 { 3741 tree ctor = orig_ctor; 3742 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor); 3743 unsigned int idx, num = vec_safe_length (elts); 3744 3745 for (idx = 0; idx < num; idx++) 3746 { 3747 tree value = (*elts)[idx].value; 3748 tree newval = value; 3749 if (TREE_CODE (value) == CONSTRUCTOR) 3750 newval = optimize_compound_literals_in_ctor (value); 3751 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR) 3752 { 3753 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value); 3754 tree decl = DECL_EXPR_DECL (decl_s); 3755 tree init = DECL_INITIAL (decl); 3756 3757 if (!TREE_ADDRESSABLE (value) 3758 && !TREE_ADDRESSABLE (decl) 3759 && init 3760 && TREE_CODE (init) == CONSTRUCTOR) 3761 newval = optimize_compound_literals_in_ctor (init); 3762 } 3763 if (newval == value) 3764 continue; 3765 3766 if (ctor == orig_ctor) 3767 { 3768 ctor = copy_node (orig_ctor); 3769 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts); 3770 elts = CONSTRUCTOR_ELTS (ctor); 3771 } 3772 (*elts)[idx].value = newval; 3773 } 3774 return ctor; 3775 } 3776 3777 /* A subroutine of gimplify_modify_expr. Break out elements of a 3778 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs. 3779 3780 Note that we still need to clear any elements that don't have explicit 3781 initializers, so if not all elements are initialized we keep the 3782 original MODIFY_EXPR, we just remove all of the constructor elements. 3783 3784 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return 3785 GS_ERROR if we would have to create a temporary when gimplifying 3786 this constructor. Otherwise, return GS_OK. 3787 3788 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */ 3789 3790 static enum gimplify_status 3791 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p, 3792 bool want_value, bool notify_temp_creation) 3793 { 3794 tree object, ctor, type; 3795 enum gimplify_status ret; 3796 vec<constructor_elt, va_gc> *elts; 3797 3798 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR); 3799 3800 if (!notify_temp_creation) 3801 { 3802 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p, 3803 is_gimple_lvalue, fb_lvalue); 3804 if (ret == GS_ERROR) 3805 return ret; 3806 } 3807 3808 object = TREE_OPERAND (*expr_p, 0); 3809 ctor = TREE_OPERAND (*expr_p, 1) = 3810 optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1)); 3811 type = TREE_TYPE (ctor); 3812 elts = CONSTRUCTOR_ELTS (ctor); 3813 ret = GS_ALL_DONE; 3814 3815 switch (TREE_CODE (type)) 3816 { 3817 case RECORD_TYPE: 3818 case UNION_TYPE: 3819 case QUAL_UNION_TYPE: 3820 case ARRAY_TYPE: 3821 { 3822 struct gimplify_init_ctor_preeval_data preeval_data; 3823 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements; 3824 bool cleared, complete_p, valid_const_initializer; 3825 3826 /* Aggregate types must lower constructors to initialization of 3827 individual elements. The exception is that a CONSTRUCTOR node 3828 with no elements indicates zero-initialization of the whole. 
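The GNU C empty initializer 'struct S s = { };' is one source of such an empty CONSTRUCTOR; it simply zero-fills the object.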
*/ 3829 if (vec_safe_is_empty (elts)) 3830 { 3831 if (notify_temp_creation) 3832 return GS_OK; 3833 break; 3834 } 3835 3836 /* Fetch information about the constructor to direct later processing. 3837 We might want to make static versions of it in various cases, and 3838 can only do so if it is known to be a valid constant initializer. */ 3839 valid_const_initializer 3840 = categorize_ctor_elements (ctor, &num_nonzero_elements, 3841 &num_ctor_elements, &complete_p); 3842 3843 /* If a const aggregate variable is being initialized, then it 3844 should never be a loss to promote the variable to be static. */ 3845 if (valid_const_initializer 3846 && num_nonzero_elements > 1 3847 && TREE_READONLY (object) 3848 && TREE_CODE (object) == VAR_DECL 3849 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object))) 3850 { 3851 if (notify_temp_creation) 3852 return GS_ERROR; 3853 DECL_INITIAL (object) = ctor; 3854 TREE_STATIC (object) = 1; 3855 if (!DECL_NAME (object)) 3856 DECL_NAME (object) = create_tmp_var_name ("C"); 3857 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL); 3858 3859 /* ??? C++ doesn't automatically append a .<number> to the 3860 assembler name, and even when it does, it looks at FE private 3861 data structures to figure out what that number should be, 3862 which are not set for this variable. I suppose this is 3863 important for local statics for inline functions, which aren't 3864 "local" in the object file sense. So in order to get a unique 3865 TU-local symbol, we must invoke the lhd version now. */ 3866 lhd_set_decl_assembler_name (object); 3867 3868 *expr_p = NULL_TREE; 3869 break; 3870 } 3871 3872 /* If there are "lots" of initialized elements, even discounting 3873 those that are not address constants (and thus *must* be 3874 computed at runtime), then partition the constructor into 3875 constant and non-constant parts. Block copy the constant 3876 parts in, then generate code for the non-constant parts. */ 3877 /* TODO. There's code in cp/typeck.c to do this. */ 3878 3879 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0) 3880 /* store_constructor will ignore the clearing of variable-sized 3881 objects. Initializers for such objects must explicitly set 3882 every field that needs to be set. */ 3883 cleared = false; 3884 else if (!complete_p && !CONSTRUCTOR_NO_CLEARING (ctor)) 3885 /* If the constructor isn't complete, clear the whole object 3886 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it. 3887 3888 ??? This ought not to be needed. For any element not present 3889 in the initializer, we should simply set it to zero. Except 3890 we'd need to *find* the elements that are not present, and that 3891 requires trickery to avoid quadratic compile-time behavior in 3892 large cases or excessive memory use in small cases. */ 3893 cleared = true; 3894 else if (num_ctor_elements - num_nonzero_elements 3895 > CLEAR_RATIO (optimize_function_for_speed_p (cfun)) 3896 && num_nonzero_elements < num_ctor_elements / 4) 3897 /* If there are "lots" of zeros, it's more efficient to clear 3898 the memory and then set the nonzero elements. */ 3899 cleared = true; 3900 else 3901 cleared = false; 3902 3903 /* If there are "lots" of initialized elements, and all of them 3904 are valid address constants, then the entire initializer can 3905 be dropped to memory, and then memcpy'd out. Don't do this 3906 for sparse arrays, though, as it's more efficient to follow 3907 the standard CONSTRUCTOR behavior of memset followed by 3908 individual element initialization.
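(For example, 'int a[4] = { 1, 2, 3, 4 };' may be emitted as a static constant table followed by a single block copy.)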
Also don't do this for small 3909 all-zero initializers (which aren't big enough to merit 3910 clearing), and don't try to make bitwise copies of 3911 TREE_ADDRESSABLE types. 3912 3913 We cannot apply such transformation when compiling chkp static 3914 initializer because creation of initializer image in the memory 3915 will require static initialization of bounds for it. It should 3916 result in another gimplification of similar initializer and we 3917 may fall into infinite loop. */ 3918 if (valid_const_initializer 3919 && !(cleared || num_nonzero_elements == 0) 3920 && !TREE_ADDRESSABLE (type) 3921 && (!current_function_decl 3922 || !lookup_attribute ("chkp ctor", 3923 DECL_ATTRIBUTES (current_function_decl)))) 3924 { 3925 HOST_WIDE_INT size = int_size_in_bytes (type); 3926 unsigned int align; 3927 3928 /* ??? We can still get unbounded array types, at least 3929 from the C++ front end. This seems wrong, but attempt 3930 to work around it for now. */ 3931 if (size < 0) 3932 { 3933 size = int_size_in_bytes (TREE_TYPE (object)); 3934 if (size >= 0) 3935 TREE_TYPE (ctor) = type = TREE_TYPE (object); 3936 } 3937 3938 /* Find the maximum alignment we can assume for the object. */ 3939 /* ??? Make use of DECL_OFFSET_ALIGN. */ 3940 if (DECL_P (object)) 3941 align = DECL_ALIGN (object); 3942 else 3943 align = TYPE_ALIGN (type); 3944 3945 /* Do a block move either if the size is so small as to make 3946 each individual move a sub-unit move on average, or if it 3947 is so large as to make individual moves inefficient. */ 3948 if (size > 0 3949 && num_nonzero_elements > 1 3950 && (size < num_nonzero_elements 3951 || !can_move_by_pieces (size, align))) 3952 { 3953 if (notify_temp_creation) 3954 return GS_ERROR; 3955 3956 walk_tree (&ctor, force_labels_r, NULL, NULL); 3957 ctor = tree_output_constant_def (ctor); 3958 if (!useless_type_conversion_p (type, TREE_TYPE (ctor))) 3959 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor); 3960 TREE_OPERAND (*expr_p, 1) = ctor; 3961 3962 /* This is no longer an assignment of a CONSTRUCTOR, but 3963 we still may have processing to do on the LHS. So 3964 pretend we didn't do anything here to let that happen. */ 3965 return GS_UNHANDLED; 3966 } 3967 } 3968 3969 /* If the target is volatile, we have non-zero elements and more than 3970 one field to assign, initialize the target from a temporary. */ 3971 if (TREE_THIS_VOLATILE (object) 3972 && !TREE_ADDRESSABLE (type) 3973 && num_nonzero_elements > 0 3974 && vec_safe_length (elts) > 1) 3975 { 3976 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type)); 3977 TREE_OPERAND (*expr_p, 0) = temp; 3978 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p), 3979 *expr_p, 3980 build2 (MODIFY_EXPR, void_type_node, 3981 object, temp)); 3982 return GS_OK; 3983 } 3984 3985 if (notify_temp_creation) 3986 return GS_OK; 3987 3988 /* If there are nonzero elements and if needed, pre-evaluate to capture 3989 elements overlapping with the lhs into temporaries. We must do this 3990 before clearing to fetch the values before they are zeroed-out. */ 3991 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR) 3992 { 3993 preeval_data.lhs_base_decl = get_base_address (object); 3994 if (!DECL_P (preeval_data.lhs_base_decl)) 3995 preeval_data.lhs_base_decl = NULL; 3996 preeval_data.lhs_alias_set = get_alias_set (object); 3997 3998 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1), 3999 pre_p, post_p, &preeval_data); 4000 } 4001 4002 if (cleared) 4003 { 4004 /* Zap the CONSTRUCTOR element list, which simplifies this case. 
4005 Note that we still have to gimplify, in order to handle the 4006 case of variable sized types. Avoid shared tree structures. */ 4007 CONSTRUCTOR_ELTS (ctor) = NULL; 4008 TREE_SIDE_EFFECTS (ctor) = 0; 4009 object = unshare_expr (object); 4010 gimplify_stmt (expr_p, pre_p); 4011 } 4012 4013 /* If we have not block cleared the object, or if there are nonzero 4014 elements in the constructor, add assignments to the individual 4015 scalar fields of the object. */ 4016 if (!cleared || num_nonzero_elements > 0) 4017 gimplify_init_ctor_eval (object, elts, pre_p, cleared); 4018 4019 *expr_p = NULL_TREE; 4020 } 4021 break; 4022 4023 case COMPLEX_TYPE: 4024 { 4025 tree r, i; 4026 4027 if (notify_temp_creation) 4028 return GS_OK; 4029 4030 /* Extract the real and imaginary parts out of the ctor. */ 4031 gcc_assert (elts->length () == 2); 4032 r = (*elts)[0].value; 4033 i = (*elts)[1].value; 4034 if (r == NULL || i == NULL) 4035 { 4036 tree zero = build_zero_cst (TREE_TYPE (type)); 4037 if (r == NULL) 4038 r = zero; 4039 if (i == NULL) 4040 i = zero; 4041 } 4042 4043 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to 4044 represent creation of a complex value. */ 4045 if (TREE_CONSTANT (r) && TREE_CONSTANT (i)) 4046 { 4047 ctor = build_complex (type, r, i); 4048 TREE_OPERAND (*expr_p, 1) = ctor; 4049 } 4050 else 4051 { 4052 ctor = build2 (COMPLEX_EXPR, type, r, i); 4053 TREE_OPERAND (*expr_p, 1) = ctor; 4054 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1), 4055 pre_p, 4056 post_p, 4057 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)), 4058 fb_rvalue); 4059 } 4060 } 4061 break; 4062 4063 case VECTOR_TYPE: 4064 { 4065 unsigned HOST_WIDE_INT ix; 4066 constructor_elt *ce; 4067 4068 if (notify_temp_creation) 4069 return GS_OK; 4070 4071 /* Go ahead and simplify constant constructors to VECTOR_CST. */ 4072 if (TREE_CONSTANT (ctor)) 4073 { 4074 bool constant_p = true; 4075 tree value; 4076 4077 /* Even when ctor is constant, it might contain non-*_CST 4078 elements, such as addresses or trapping values like 4079 1.0/0.0 - 1.0/0.0. Such expressions don't belong 4080 in VECTOR_CST nodes. */ 4081 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value) 4082 if (!CONSTANT_CLASS_P (value)) 4083 { 4084 constant_p = false; 4085 break; 4086 } 4087 4088 if (constant_p) 4089 { 4090 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts); 4091 break; 4092 } 4093 4094 TREE_CONSTANT (ctor) = 0; 4095 } 4096 4097 /* Vector types use CONSTRUCTOR all the way through gimple 4098 compilation as a general initializer. */ 4099 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce) 4100 { 4101 enum gimplify_status tret; 4102 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val, 4103 fb_rvalue); 4104 if (tret == GS_ERROR) 4105 ret = GS_ERROR; 4106 else if (TREE_STATIC (ctor) 4107 && !initializer_constant_valid_p (ce->value, 4108 TREE_TYPE (ce->value))) 4109 TREE_STATIC (ctor) = 0; 4110 } 4111 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0))) 4112 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p); 4113 } 4114 break; 4115 4116 default: 4117 /* So how did we get a CONSTRUCTOR for a scalar type? */ 4118 gcc_unreachable (); 4119 } 4120 4121 if (ret == GS_ERROR) 4122 return GS_ERROR; 4123 /* If we have gimplified both sides of the initializer but have 4124 not emitted an assignment, do so now. 
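The COMPLEX_TYPE and VECTOR_TYPE cases above, for instance, only rewrite TREE_OPERAND (*expr_p, 1) and leave *expr_p set, so their store is emitted here.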
*/ 4125 if (*expr_p) 4126 { 4127 tree lhs = TREE_OPERAND (*expr_p, 0); 4128 tree rhs = TREE_OPERAND (*expr_p, 1); 4129 gassign *init = gimple_build_assign (lhs, rhs); 4130 gimplify_seq_add_stmt (pre_p, init); 4131 } 4132 if (want_value) 4133 { 4134 *expr_p = object; 4135 return GS_OK; 4136 } 4137 else 4138 { 4139 *expr_p = NULL; 4140 return GS_ALL_DONE; 4141 } 4142 } 4143 4144 /* Given a pointer value OP0, return a simplified version of an 4145 indirection through OP0, or NULL_TREE if no simplification is 4146 possible. This may only be applied to a rhs of an expression. 4147 Note that the resulting type may be different from the type pointed 4148 to in the sense that it is still compatible from the langhooks 4149 point of view. */ 4150 4151 static tree 4152 gimple_fold_indirect_ref_rhs (tree t) 4153 { 4154 return gimple_fold_indirect_ref (t); 4155 } 4156 4157 /* Subroutine of gimplify_modify_expr to do simplifications of 4158 MODIFY_EXPRs based on the code of the RHS. We loop for as long as 4159 something changes. */ 4160 4161 static enum gimplify_status 4162 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p, 4163 gimple_seq *pre_p, gimple_seq *post_p, 4164 bool want_value) 4165 { 4166 enum gimplify_status ret = GS_UNHANDLED; 4167 bool changed; 4168 4169 do 4170 { 4171 changed = false; 4172 switch (TREE_CODE (*from_p)) 4173 { 4174 case VAR_DECL: 4175 /* If we're assigning from a read-only variable initialized with 4176 a constructor, do the direct assignment from the constructor, 4177 but only if neither source nor target are volatile since this 4178 latter assignment might end up being done on a per-field basis. */ 4179 if (DECL_INITIAL (*from_p) 4180 && TREE_READONLY (*from_p) 4181 && !TREE_THIS_VOLATILE (*from_p) 4182 && !TREE_THIS_VOLATILE (*to_p) 4183 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR) 4184 { 4185 tree old_from = *from_p; 4186 enum gimplify_status subret; 4187 4188 /* Move the constructor into the RHS. */ 4189 *from_p = unshare_expr (DECL_INITIAL (*from_p)); 4190 4191 /* Let's see if gimplify_init_constructor will need to put 4192 it in memory. */ 4193 subret = gimplify_init_constructor (expr_p, NULL, NULL, 4194 false, true); 4195 if (subret == GS_ERROR) 4196 { 4197 /* If so, revert the change. */ 4198 *from_p = old_from; 4199 } 4200 else 4201 { 4202 ret = GS_OK; 4203 changed = true; 4204 } 4205 } 4206 break; 4207 case INDIRECT_REF: 4208 { 4209 /* If we have code like 4210 4211 *(const A*)(A*)&x 4212 4213 where the type of "x" is a (possibly cv-qualified variant 4214 of "A"), treat the entire expression as identical to "x". 4215 This kind of code arises in C++ when an object is bound 4216 to a const reference, and if "x" is a TARGET_EXPR we want 4217 to take advantage of the optimization below. */ 4218 bool volatile_p = TREE_THIS_VOLATILE (*from_p); 4219 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0)); 4220 if (t) 4221 { 4222 if (TREE_THIS_VOLATILE (t) != volatile_p) 4223 { 4224 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration) 4225 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p), 4226 build_fold_addr_expr (t)); 4227 if (REFERENCE_CLASS_P (t)) 4228 TREE_THIS_VOLATILE (t) = volatile_p; 4229 } 4230 *from_p = t; 4231 ret = GS_OK; 4232 changed = true; 4233 } 4234 break; 4235 } 4236 4237 case TARGET_EXPR: 4238 { 4239 /* If we are initializing something from a TARGET_EXPR, strip the 4240 TARGET_EXPR and initialize it directly, if possible. 
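In C++, for instance, 'T t = T (1);' can then initialize t directly from the constructor call, without a separate temporary.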
This can't 4241 be done if the initializer is void, since that implies that the 4242 temporary is set in some non-trivial way. 4243 4244 ??? What about code that pulls out the temp and uses it 4245 elsewhere? I think that such code never uses the TARGET_EXPR as 4246 an initializer. If I'm wrong, we'll die because the temp won't 4247 have any RTL. In that case, I guess we'll need to replace 4248 references somehow. */ 4249 tree init = TARGET_EXPR_INITIAL (*from_p); 4250 4251 if (init 4252 && !VOID_TYPE_P (TREE_TYPE (init))) 4253 { 4254 *from_p = init; 4255 ret = GS_OK; 4256 changed = true; 4257 } 4258 } 4259 break; 4260 4261 case COMPOUND_EXPR: 4262 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be 4263 caught. */ 4264 gimplify_compound_expr (from_p, pre_p, true); 4265 ret = GS_OK; 4266 changed = true; 4267 break; 4268 4269 case CONSTRUCTOR: 4270 /* If we already made some changes, let the front end have a 4271 crack at this before we break it down. */ 4272 if (ret != GS_UNHANDLED) 4273 break; 4274 /* If we're initializing from a CONSTRUCTOR, break this into 4275 individual MODIFY_EXPRs. */ 4276 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value, 4277 false); 4278 4279 case COND_EXPR: 4280 /* If we're assigning to a non-register type, push the assignment 4281 down into the branches. This is mandatory for ADDRESSABLE types, 4282 since we cannot generate temporaries for such, but it saves a 4283 copy in other cases as well. */ 4284 if (!is_gimple_reg_type (TREE_TYPE (*from_p))) 4285 { 4286 /* This code should mirror the code in gimplify_cond_expr. */ 4287 enum tree_code code = TREE_CODE (*expr_p); 4288 tree cond = *from_p; 4289 tree result = *to_p; 4290 4291 ret = gimplify_expr (&result, pre_p, post_p, 4292 is_gimple_lvalue, fb_lvalue); 4293 if (ret != GS_ERROR) 4294 ret = GS_OK; 4295 4296 /* If we are going to write RESULT more than once, clear 4297 TREE_READONLY flag, otherwise we might incorrectly promote 4298 the variable to static const and initialize it at compile 4299 time in one of the branches. */ 4300 if (VAR_P (result) 4301 && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node 4302 && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node) 4303 TREE_READONLY (result) = 0; 4304 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node) 4305 TREE_OPERAND (cond, 1) 4306 = build2 (code, void_type_node, result, 4307 TREE_OPERAND (cond, 1)); 4308 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node) 4309 TREE_OPERAND (cond, 2) 4310 = build2 (code, void_type_node, unshare_expr (result), 4311 TREE_OPERAND (cond, 2)); 4312 4313 TREE_TYPE (cond) = void_type_node; 4314 recalculate_side_effects (cond); 4315 4316 if (want_value) 4317 { 4318 gimplify_and_add (cond, pre_p); 4319 *expr_p = unshare_expr (result); 4320 } 4321 else 4322 *expr_p = cond; 4323 return ret; 4324 } 4325 break; 4326 4327 case CALL_EXPR: 4328 /* For calls that return in memory, give *to_p as the CALL_EXPR's 4329 return slot so that we don't generate a temporary. */ 4330 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p) 4331 && aggregate_value_p (*from_p, *from_p)) 4332 { 4333 bool use_target; 4334 4335 if (!(rhs_predicate_for (*to_p))(*from_p)) 4336 /* If we need a temporary, *to_p isn't accurate. */ 4337 use_target = false; 4338 /* It's OK to use the return slot directly unless it's an NRV. 
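An anonymous RESULT_DECL cannot be the NRV case here, as a RESULT_DECL standing in for a named user variable carries that variable's DECL_NAME.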
*/ 4339 else if (TREE_CODE (*to_p) == RESULT_DECL 4340 && DECL_NAME (*to_p) == NULL_TREE 4341 && needs_to_live_in_memory (*to_p)) 4342 use_target = true; 4343 else if (is_gimple_reg_type (TREE_TYPE (*to_p)) 4344 || (DECL_P (*to_p) && DECL_REGISTER (*to_p))) 4345 /* Don't force regs into memory. */ 4346 use_target = false; 4347 else if (TREE_CODE (*expr_p) == INIT_EXPR) 4348 /* It's OK to use the target directly if it's being 4349 initialized. */ 4350 use_target = true; 4351 else if (variably_modified_type_p (TREE_TYPE (*to_p), NULL_TREE)) 4352 /* Always use the target and thus RSO for variable-sized types. 4353 GIMPLE cannot deal with a variable-sized assignment 4354 embedded in a call statement. */ 4355 use_target = true; 4356 else if (TREE_CODE (*to_p) != SSA_NAME 4357 && (!is_gimple_variable (*to_p) 4358 || needs_to_live_in_memory (*to_p))) 4359 /* Don't use the original target if it's already addressable; 4360 if its address escapes, and the called function uses the 4361 NRV optimization, a conforming program could see *to_p 4362 change before the called function returns; see c++/19317. 4363 When optimizing, the return_slot pass marks more functions 4364 as safe after we have escape info. */ 4365 use_target = false; 4366 else 4367 use_target = true; 4368 4369 if (use_target) 4370 { 4371 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1; 4372 mark_addressable (*to_p); 4373 } 4374 } 4375 break; 4376 4377 case WITH_SIZE_EXPR: 4378 /* Likewise for calls that return an aggregate of non-constant size, 4379 since we would not be able to generate a temporary at all. */ 4380 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR) 4381 { 4382 *from_p = TREE_OPERAND (*from_p, 0); 4383 /* We don't change ret in this case because the 4384 WITH_SIZE_EXPR might have been added in 4385 gimplify_modify_expr, so returning GS_OK would lead to an 4386 infinite loop. */ 4387 changed = true; 4388 } 4389 break; 4390 4391 /* If we're initializing from a container, push the initialization 4392 inside it. */ 4393 case CLEANUP_POINT_EXPR: 4394 case BIND_EXPR: 4395 case STATEMENT_LIST: 4396 { 4397 tree wrap = *from_p; 4398 tree t; 4399 4400 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval, 4401 fb_lvalue); 4402 if (ret != GS_ERROR) 4403 ret = GS_OK; 4404 4405 t = voidify_wrapper_expr (wrap, *expr_p); 4406 gcc_assert (t == *expr_p); 4407 4408 if (want_value) 4409 { 4410 gimplify_and_add (wrap, pre_p); 4411 *expr_p = unshare_expr (*to_p); 4412 } 4413 else 4414 *expr_p = wrap; 4415 return GS_OK; 4416 } 4417 4418 case COMPOUND_LITERAL_EXPR: 4419 { 4420 tree complit = TREE_OPERAND (*expr_p, 1); 4421 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit); 4422 tree decl = DECL_EXPR_DECL (decl_s); 4423 tree init = DECL_INITIAL (decl); 4424 4425 /* struct T x = (struct T) { 0, 1, 2 } can be optimized 4426 into struct T x = { 0, 1, 2 } if the address of the 4427 compound literal has never been taken. */ 4428 if (!TREE_ADDRESSABLE (complit) 4429 && !TREE_ADDRESSABLE (decl) 4430 && init) 4431 { 4432 *expr_p = copy_node (*expr_p); 4433 TREE_OPERAND (*expr_p, 1) = init; 4434 return GS_OK; 4435 } 4436 } 4437 4438 default: 4439 break; 4440 } 4441 } 4442 while (changed); 4443 4444 return ret; 4445 } 4446 4447 4448 /* Return true if T looks like a valid GIMPLE statement. */ 4449 4450 static bool 4451 is_gimple_stmt (tree t) 4452 { 4453 const enum tree_code code = TREE_CODE (t); 4454 4455 switch (code) 4456 { 4457 case NOP_EXPR: 4458 /* The only valid NOP_EXPR is the empty statement. 
*/ 4459 return IS_EMPTY_STMT (t); 4460 4461 case BIND_EXPR: 4462 case COND_EXPR: 4463 /* These are only valid if they're void. */ 4464 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t)); 4465 4466 case SWITCH_EXPR: 4467 case GOTO_EXPR: 4468 case RETURN_EXPR: 4469 case LABEL_EXPR: 4470 case CASE_LABEL_EXPR: 4471 case TRY_CATCH_EXPR: 4472 case TRY_FINALLY_EXPR: 4473 case EH_FILTER_EXPR: 4474 case CATCH_EXPR: 4475 case ASM_EXPR: 4476 case STATEMENT_LIST: 4477 case OACC_PARALLEL: 4478 case OACC_KERNELS: 4479 case OACC_DATA: 4480 case OACC_HOST_DATA: 4481 case OACC_DECLARE: 4482 case OACC_UPDATE: 4483 case OACC_ENTER_DATA: 4484 case OACC_EXIT_DATA: 4485 case OACC_CACHE: 4486 case OMP_PARALLEL: 4487 case OMP_FOR: 4488 case OMP_SIMD: 4489 case CILK_SIMD: 4490 case OMP_DISTRIBUTE: 4491 case OACC_LOOP: 4492 case OMP_SECTIONS: 4493 case OMP_SECTION: 4494 case OMP_SINGLE: 4495 case OMP_MASTER: 4496 case OMP_TASKGROUP: 4497 case OMP_ORDERED: 4498 case OMP_CRITICAL: 4499 case OMP_TASK: 4500 /* These are always void. */ 4501 return true; 4502 4503 case CALL_EXPR: 4504 case MODIFY_EXPR: 4505 case PREDICT_EXPR: 4506 /* These are valid regardless of their type. */ 4507 return true; 4508 4509 default: 4510 return false; 4511 } 4512 } 4513 4514 4515 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is 4516 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with 4517 DECL_GIMPLE_REG_P set. 4518 4519 IMPORTANT NOTE: This promotion is performed by introducing a load of the 4520 other, unmodified part of the complex object just before the total store. 4521 As a consequence, if the object is still uninitialized, an undefined value 4522 will be loaded into a register, which may result in a spurious exception 4523 if the register is floating-point and the value happens to be a signaling 4524 NaN for example. Then the fully-fledged complex operations lowering pass 4525 followed by a DCE pass are necessary in order to fix things up. */ 4526 4527 static enum gimplify_status 4528 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p, 4529 bool want_value) 4530 { 4531 enum tree_code code, ocode; 4532 tree lhs, rhs, new_rhs, other, realpart, imagpart; 4533 4534 lhs = TREE_OPERAND (*expr_p, 0); 4535 rhs = TREE_OPERAND (*expr_p, 1); 4536 code = TREE_CODE (lhs); 4537 lhs = TREE_OPERAND (lhs, 0); 4538 4539 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR; 4540 other = build1 (ocode, TREE_TYPE (rhs), lhs); 4541 TREE_NO_WARNING (other) = 1; 4542 other = get_formal_tmp_var (other, pre_p); 4543 4544 realpart = code == REALPART_EXPR ? rhs : other; 4545 imagpart = code == REALPART_EXPR ? other : rhs; 4546 4547 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart)) 4548 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart); 4549 else 4550 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart); 4551 4552 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs)); 4553 *expr_p = (want_value) ? rhs : NULL_TREE; 4554 4555 return GS_ALL_DONE; 4556 } 4557 4558 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P. 4559 4560 modify_expr 4561 : varname '=' rhs 4562 | '*' ID '=' rhs 4563 4564 PRE_P points to the list where side effects that must happen before 4565 *EXPR_P should be stored. 4566 4567 POST_P points to the list where side effects that must happen after 4568 *EXPR_P should be stored. 4569 4570 WANT_VALUE is nonzero iff we want to use the value of this expression 4571 in another expression. 
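For example, 'a = b' with simple operands becomes the tuple GIMPLE_ASSIGN <a, b>, whereas 'a = foo ()' must become GIMPLE_CALL <a, foo>; see below.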
*/ 4572 4573 static enum gimplify_status 4574 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p, 4575 bool want_value) 4576 { 4577 tree *from_p = &TREE_OPERAND (*expr_p, 1); 4578 tree *to_p = &TREE_OPERAND (*expr_p, 0); 4579 enum gimplify_status ret = GS_UNHANDLED; 4580 gimple assign; 4581 location_t loc = EXPR_LOCATION (*expr_p); 4582 gimple_stmt_iterator gsi; 4583 4584 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR 4585 || TREE_CODE (*expr_p) == INIT_EXPR); 4586 4587 /* Trying to simplify a clobber using normal logic doesn't work, 4588 so handle it here. */ 4589 if (TREE_CLOBBER_P (*from_p)) 4590 { 4591 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue); 4592 if (ret == GS_ERROR) 4593 return ret; 4594 gcc_assert (!want_value 4595 && (TREE_CODE (*to_p) == VAR_DECL 4596 || TREE_CODE (*to_p) == MEM_REF)); 4597 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p)); 4598 *expr_p = NULL; 4599 return GS_ALL_DONE; 4600 } 4601 4602 /* Insert pointer conversions required by the middle-end that are not 4603 required by the frontend. This fixes middle-end type checking for, 4604 for example, gcc.dg/redecl-6.c. */ 4605 if (POINTER_TYPE_P (TREE_TYPE (*to_p))) 4606 { 4607 STRIP_USELESS_TYPE_CONVERSION (*from_p); 4608 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p))) 4609 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p); 4610 } 4611 4612 /* See if any simplifications can be done based on what the RHS is. */ 4613 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p, 4614 want_value); 4615 if (ret != GS_UNHANDLED) 4616 return ret; 4617 4618 /* For zero sized types only gimplify the left hand side and right hand 4619 side as statements and throw away the assignment. Do this after 4620 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable 4621 types properly. */ 4622 if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value) 4623 { 4624 gimplify_stmt (from_p, pre_p); 4625 gimplify_stmt (to_p, pre_p); 4626 *expr_p = NULL_TREE; 4627 return GS_ALL_DONE; 4628 } 4629 4630 /* If the value being copied is of variable width, compute the length 4631 of the copy into a WITH_SIZE_EXPR. Note that we need to do this 4632 before gimplifying any of the operands so that we can resolve any 4633 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses 4634 the size of the expression to be copied, not of the destination, so 4635 that is what we must do here. */ 4636 maybe_with_size_expr (from_p); 4637 4638 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue); 4639 if (ret == GS_ERROR) 4640 return ret; 4641 4642 /* As a special case, we have to temporarily allow for assignments 4643 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is 4644 a toplevel statement, when gimplifying the GENERIC expression 4645 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple 4646 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>. 4647 4648 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To 4649 prevent gimplify_expr from trying to create a new temporary for 4650 foo's LHS, we tell it that it should only gimplify until it 4651 reaches the CALL_EXPR. On return from gimplify_expr, the newly 4652 created GIMPLE_CALL <foo> will be the last statement in *PRE_P 4653 and all we need to do here is set 'a' to be its LHS.
*/ 4654 ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p), 4655 fb_rvalue); 4656 if (ret == GS_ERROR) 4657 return ret; 4658 4659 /* Now see if the above changed *from_p to something we handle specially. */ 4660 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p, 4661 want_value); 4662 if (ret != GS_UNHANDLED) 4663 return ret; 4664 4665 /* If we've got a variable sized assignment between two lvalues (i.e. does 4666 not involve a call), then we can make things a bit more straightforward 4667 by converting the assignment to memcpy or memset. */ 4668 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR) 4669 { 4670 tree from = TREE_OPERAND (*from_p, 0); 4671 tree size = TREE_OPERAND (*from_p, 1); 4672 4673 if (TREE_CODE (from) == CONSTRUCTOR) 4674 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p); 4675 4676 if (is_gimple_addressable (from)) 4677 { 4678 *from_p = from; 4679 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value, 4680 pre_p); 4681 } 4682 } 4683 4684 /* Transform partial stores to non-addressable complex variables into 4685 total stores. This allows us to use real instead of virtual operands 4686 for these variables, which improves optimization. */ 4687 if ((TREE_CODE (*to_p) == REALPART_EXPR 4688 || TREE_CODE (*to_p) == IMAGPART_EXPR) 4689 && is_gimple_reg (TREE_OPERAND (*to_p, 0))) 4690 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value); 4691 4692 /* Try to alleviate the effects of the gimplification creating artificial 4693 temporaries (see for example is_gimple_reg_rhs) on the debug info. */ 4694 if (!gimplify_ctxp->into_ssa 4695 && TREE_CODE (*from_p) == VAR_DECL 4696 && DECL_IGNORED_P (*from_p) 4697 && DECL_P (*to_p) 4698 && !DECL_IGNORED_P (*to_p)) 4699 { 4700 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p)) 4701 DECL_NAME (*from_p) 4702 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p))); 4703 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1; 4704 SET_DECL_DEBUG_EXPR (*from_p, *to_p); 4705 } 4706 4707 if (want_value && TREE_THIS_VOLATILE (*to_p)) 4708 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p); 4709 4710 if (TREE_CODE (*from_p) == CALL_EXPR) 4711 { 4712 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL 4713 instead of a GIMPLE_ASSIGN. */ 4714 gcall *call_stmt; 4715 if (CALL_EXPR_FN (*from_p) == NULL_TREE) 4716 { 4717 /* Gimplify internal functions created in the FEs. 
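These calls have no CALL_EXPR_FN; each argument is gimplified and the call is rebuilt as a GIMPLE_CALL on the internal function code.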
*/ 4718 int nargs = call_expr_nargs (*from_p), i; 4719 enum internal_fn ifn = CALL_EXPR_IFN (*from_p); 4720 auto_vec<tree> vargs (nargs); 4721 4722 for (i = 0; i < nargs; i++) 4723 { 4724 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p, 4725 EXPR_LOCATION (*from_p)); 4726 vargs.quick_push (CALL_EXPR_ARG (*from_p, i)); 4727 } 4728 call_stmt = gimple_build_call_internal_vec (ifn, vargs); 4729 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p)); 4730 } 4731 else 4732 { 4733 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p)); 4734 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0); 4735 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p)); 4736 tree fndecl = get_callee_fndecl (*from_p); 4737 if (fndecl 4738 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL 4739 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT 4740 && call_expr_nargs (*from_p) == 3) 4741 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3, 4742 CALL_EXPR_ARG (*from_p, 0), 4743 CALL_EXPR_ARG (*from_p, 1), 4744 CALL_EXPR_ARG (*from_p, 2)); 4745 else 4746 { 4747 call_stmt = gimple_build_call_from_tree (*from_p); 4748 gimple_call_set_fntype (call_stmt, TREE_TYPE (fnptrtype)); 4749 } 4750 } 4751 notice_special_calls (call_stmt); 4752 if (!gimple_call_noreturn_p (call_stmt)) 4753 gimple_call_set_lhs (call_stmt, *to_p); 4754 assign = call_stmt; 4755 } 4756 else 4757 { 4758 assign = gimple_build_assign (*to_p, *from_p); 4759 gimple_set_location (assign, EXPR_LOCATION (*expr_p)); 4760 } 4761 4762 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p)) 4763 { 4764 /* We should have got an SSA name from the start. */ 4765 gcc_assert (TREE_CODE (*to_p) == SSA_NAME); 4766 } 4767 4768 gimplify_seq_add_stmt (pre_p, assign); 4769 gsi = gsi_last (*pre_p); 4770 maybe_fold_stmt (&gsi); 4771 4772 if (want_value) 4773 { 4774 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p); 4775 return GS_OK; 4776 } 4777 else 4778 *expr_p = NULL; 4779 4780 return GS_ALL_DONE; 4781 } 4782 4783 /* Gimplify a comparison between two variable-sized objects. Do this 4784 with a call to BUILT_IN_MEMCMP. */ 4785 4786 static enum gimplify_status 4787 gimplify_variable_sized_compare (tree *expr_p) 4788 { 4789 location_t loc = EXPR_LOCATION (*expr_p); 4790 tree op0 = TREE_OPERAND (*expr_p, 0); 4791 tree op1 = TREE_OPERAND (*expr_p, 1); 4792 tree t, arg, dest, src, expr; 4793 4794 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0)); 4795 arg = unshare_expr (arg); 4796 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0); 4797 src = build_fold_addr_expr_loc (loc, op1); 4798 dest = build_fold_addr_expr_loc (loc, op0); 4799 t = builtin_decl_implicit (BUILT_IN_MEMCMP); 4800 t = build_call_expr_loc (loc, t, 3, dest, src, arg); 4801 4802 expr 4803 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node); 4804 SET_EXPR_LOCATION (expr, loc); 4805 *expr_p = expr; 4806 4807 return GS_OK; 4808 } 4809 4810 /* Gimplify a comparison between two aggregate objects of integral scalar 4811 mode as a comparison between the bitwise equivalent scalar values. 
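E.g., on a target where unsigned int has SImode, two 4-byte structs of that mode compare as VIEW_CONVERT_EXPR <unsigned int> (a) == VIEW_CONVERT_EXPR <unsigned int> (b).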
*/ 4812 4813 static enum gimplify_status 4814 gimplify_scalar_mode_aggregate_compare (tree *expr_p) 4815 { 4816 location_t loc = EXPR_LOCATION (*expr_p); 4817 tree op0 = TREE_OPERAND (*expr_p, 0); 4818 tree op1 = TREE_OPERAND (*expr_p, 1); 4819 4820 tree type = TREE_TYPE (op0); 4821 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1); 4822 4823 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0); 4824 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1); 4825 4826 *expr_p 4827 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1); 4828 4829 return GS_OK; 4830 } 4831 4832 /* Gimplify an expression sequence. This function gimplifies each 4833 expression and rewrites the original expression with the last 4834 expression of the sequence in GIMPLE form. 4835 4836 PRE_P points to the list where the side effects for all the 4837 expressions in the sequence will be emitted. 4838 4839 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */ 4840 4841 static enum gimplify_status 4842 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value) 4843 { 4844 tree t = *expr_p; 4845 4846 do 4847 { 4848 tree *sub_p = &TREE_OPERAND (t, 0); 4849 4850 if (TREE_CODE (*sub_p) == COMPOUND_EXPR) 4851 gimplify_compound_expr (sub_p, pre_p, false); 4852 else 4853 gimplify_stmt (sub_p, pre_p); 4854 4855 t = TREE_OPERAND (t, 1); 4856 } 4857 while (TREE_CODE (t) == COMPOUND_EXPR); 4858 4859 *expr_p = t; 4860 if (want_value) 4861 return GS_OK; 4862 else 4863 { 4864 gimplify_stmt (expr_p, pre_p); 4865 return GS_ALL_DONE; 4866 } 4867 } 4868 4869 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to 4870 gimplify. After gimplification, EXPR_P will point to a new temporary 4871 that holds the original value of the SAVE_EXPR node. 4872 4873 PRE_P points to the list where side effects that must happen before 4874 *EXPR_P should be stored. */ 4875 4876 static enum gimplify_status 4877 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p) 4878 { 4879 enum gimplify_status ret = GS_ALL_DONE; 4880 tree val; 4881 4882 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR); 4883 val = TREE_OPERAND (*expr_p, 0); 4884 4885 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */ 4886 if (!SAVE_EXPR_RESOLVED_P (*expr_p)) 4887 { 4888 /* The operand may be a void-valued expression such as SAVE_EXPRs 4889 generated by the Java frontend for class initialization. It is 4890 being executed only for its side-effects. */ 4891 if (TREE_TYPE (val) == void_type_node) 4892 { 4893 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p, 4894 is_gimple_stmt, fb_none); 4895 val = NULL; 4896 } 4897 else 4898 val = get_initialized_tmp_var (val, pre_p, post_p); 4899 4900 TREE_OPERAND (*expr_p, 0) = val; 4901 SAVE_EXPR_RESOLVED_P (*expr_p) = 1; 4902 } 4903 4904 *expr_p = val; 4905 4906 return ret; 4907 } 4908 4909 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P 4910 4911 unary_expr 4912 : ... 4913 | '&' varname 4914 ... 4915 4916 PRE_P points to the list where side effects that must happen before 4917 *EXPR_P should be stored. 4918 4919 POST_P points to the list where side effects that must happen after 4920 *EXPR_P should be stored. 
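For example, '&*ptr' collapses back to 'ptr', with a conversion added if cv-qualifiers were dropped; see the INDIRECT_REF case below.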
*/ 4921 4922 static enum gimplify_status 4923 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p) 4924 { 4925 tree expr = *expr_p; 4926 tree op0 = TREE_OPERAND (expr, 0); 4927 enum gimplify_status ret; 4928 location_t loc = EXPR_LOCATION (*expr_p); 4929 4930 switch (TREE_CODE (op0)) 4931 { 4932 case INDIRECT_REF: 4933 do_indirect_ref: 4934 /* Check if we are dealing with an expression of the form '&*ptr'. 4935 While the front end folds away '&*ptr' into 'ptr', these 4936 expressions may be generated internally by the compiler (e.g., 4937 builtins like __builtin_va_end). */ 4938 /* Caution: the silent array decomposition semantics we allow for 4939 ADDR_EXPR mean we can't always discard the pair. */ 4940 /* Gimplification of the ADDR_EXPR operand may drop 4941 cv-qualification conversions, so make sure we add them if 4942 needed. */ 4943 { 4944 tree op00 = TREE_OPERAND (op0, 0); 4945 tree t_expr = TREE_TYPE (expr); 4946 tree t_op00 = TREE_TYPE (op00); 4947 4948 if (!useless_type_conversion_p (t_expr, t_op00)) 4949 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00); 4950 *expr_p = op00; 4951 ret = GS_OK; 4952 } 4953 break; 4954 4955 case VIEW_CONVERT_EXPR: 4956 /* Take the address of our operand and then convert it to the type of 4957 this ADDR_EXPR. 4958 4959 ??? The interactions of VIEW_CONVERT_EXPR and aliasing are not at 4960 all clear. The impact of this transformation is even less clear. */ 4961 4962 /* If the operand is a useless conversion, look through it. Doing so 4963 guarantees that the ADDR_EXPR and its operand will remain of the 4964 same type. */ 4965 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0))) 4966 op0 = TREE_OPERAND (op0, 0); 4967 4968 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr), 4969 build_fold_addr_expr_loc (loc, 4970 TREE_OPERAND (op0, 0))); 4971 ret = GS_OK; 4972 break; 4973 4974 default: 4975 /* If we see a call to a declared builtin or see its address 4976 being taken (we can unify those cases here) then we can mark 4977 the builtin for implicit generation by GCC. */ 4978 if (TREE_CODE (op0) == FUNCTION_DECL 4979 && DECL_BUILT_IN_CLASS (op0) == BUILT_IN_NORMAL 4980 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0))) 4981 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true); 4982 4983 /* We use fb_either here because the C frontend sometimes takes 4984 the address of a call that returns a struct; see 4985 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make 4986 the implied temporary explicit. */ 4987 4988 /* Make the operand addressable. */ 4989 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p, 4990 is_gimple_addressable, fb_either); 4991 if (ret == GS_ERROR) 4992 break; 4993 4994 /* Then mark it. Beware that it may not be possible to do so directly 4995 if a temporary has been created by the gimplification. */ 4996 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p); 4997 4998 op0 = TREE_OPERAND (expr, 0); 4999 5000 /* For various reasons, the gimplification of the expression 5001 may have made a new INDIRECT_REF. */ 5002 if (TREE_CODE (op0) == INDIRECT_REF) 5003 goto do_indirect_ref; 5004 5005 mark_addressable (TREE_OPERAND (expr, 0)); 5006 5007 /* The FEs may end up building ADDR_EXPRs early on a decl with 5008 an incomplete type. Re-build ADDR_EXPRs in canonical form 5009 here. */ 5010 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr)))) 5011 *expr_p = build_fold_addr_expr (op0); 5012 5013 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly.
*/ 5014 recompute_tree_invariant_for_addr_expr (*expr_p); 5015 5016 /* If we re-built the ADDR_EXPR add a conversion to the original type 5017 if required. */ 5018 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p))) 5019 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p); 5020 5021 break; 5022 } 5023 5024 return ret; 5025 } 5026 5027 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple 5028 value; output operands should be a gimple lvalue. */ 5029 5030 static enum gimplify_status 5031 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p) 5032 { 5033 tree expr; 5034 int noutputs; 5035 const char **oconstraints; 5036 int i; 5037 tree link; 5038 const char *constraint; 5039 bool allows_mem, allows_reg, is_inout; 5040 enum gimplify_status ret, tret; 5041 gasm *stmt; 5042 vec<tree, va_gc> *inputs; 5043 vec<tree, va_gc> *outputs; 5044 vec<tree, va_gc> *clobbers; 5045 vec<tree, va_gc> *labels; 5046 tree link_next; 5047 5048 expr = *expr_p; 5049 noutputs = list_length (ASM_OUTPUTS (expr)); 5050 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *)); 5051 5052 inputs = NULL; 5053 outputs = NULL; 5054 clobbers = NULL; 5055 labels = NULL; 5056 5057 ret = GS_ALL_DONE; 5058 link_next = NULL_TREE; 5059 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next) 5060 { 5061 bool ok; 5062 size_t constraint_len; 5063 5064 link_next = TREE_CHAIN (link); 5065 5066 oconstraints[i] 5067 = constraint 5068 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link))); 5069 constraint_len = strlen (constraint); 5070 if (constraint_len == 0) 5071 continue; 5072 5073 ok = parse_output_constraint (&constraint, i, 0, 0, 5074 &allows_mem, &allows_reg, &is_inout); 5075 if (!ok) 5076 { 5077 ret = GS_ERROR; 5078 is_inout = false; 5079 } 5080 5081 if (!allows_reg && allows_mem) 5082 mark_addressable (TREE_VALUE (link)); 5083 5084 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p, 5085 is_inout ? is_gimple_min_lval : is_gimple_lvalue, 5086 fb_lvalue | fb_mayfail); 5087 if (tret == GS_ERROR) 5088 { 5089 error ("invalid lvalue in asm output %d", i); 5090 ret = tret; 5091 } 5092 5093 vec_safe_push (outputs, link); 5094 TREE_CHAIN (link) = NULL_TREE; 5095 5096 if (is_inout) 5097 { 5098 /* An input/output operand. To give the optimizers more 5099 flexibility, split it into separate input and output 5100 operands. */ 5101 tree input; 5102 char buf[10]; 5103 5104 /* Turn the in/out constraint into an output constraint. */ 5105 char *p = xstrdup (constraint); 5106 p[0] = '='; 5107 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p); 5108 5109 /* And add a matching input constraint. */ 5110 if (allows_reg) 5111 { 5112 sprintf (buf, "%d", i); 5113 5114 /* If there are multiple alternatives in the constraint, 5115 handle each of them individually. Those that allow register 5116 will be replaced with operand number, the others will stay 5117 unchanged. 
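For instance, an in/out operand 0 with constraint "+m,r" becomes the output "=m,r" plus a new input "m,0": the register alternative is replaced by the matching operand number, the memory alternative is kept.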
*/ 5118 if (strchr (p, ',') != NULL) 5119 { 5120 size_t len = 0, buflen = strlen (buf); 5121 char *beg, *end, *str, *dst; 5122 5123 for (beg = p + 1;;) 5124 { 5125 end = strchr (beg, ','); 5126 if (end == NULL) 5127 end = strchr (beg, '\0'); 5128 if ((size_t) (end - beg) < buflen) 5129 len += buflen + 1; 5130 else 5131 len += end - beg + 1; 5132 if (*end) 5133 beg = end + 1; 5134 else 5135 break; 5136 } 5137 5138 str = (char *) alloca (len); 5139 for (beg = p + 1, dst = str;;) 5140 { 5141 const char *tem; 5142 bool mem_p, reg_p, inout_p; 5143 5144 end = strchr (beg, ','); 5145 if (end) 5146 *end = '\0'; 5147 beg[-1] = '='; 5148 tem = beg - 1; 5149 parse_output_constraint (&tem, i, 0, 0, 5150 &mem_p, ®_p, &inout_p); 5151 if (dst != str) 5152 *dst++ = ','; 5153 if (reg_p) 5154 { 5155 memcpy (dst, buf, buflen); 5156 dst += buflen; 5157 } 5158 else 5159 { 5160 if (end) 5161 len = end - beg; 5162 else 5163 len = strlen (beg); 5164 memcpy (dst, beg, len); 5165 dst += len; 5166 } 5167 if (end) 5168 beg = end + 1; 5169 else 5170 break; 5171 } 5172 *dst = '\0'; 5173 input = build_string (dst - str, str); 5174 } 5175 else 5176 input = build_string (strlen (buf), buf); 5177 } 5178 else 5179 input = build_string (constraint_len - 1, constraint + 1); 5180 5181 free (p); 5182 5183 input = build_tree_list (build_tree_list (NULL_TREE, input), 5184 unshare_expr (TREE_VALUE (link))); 5185 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input); 5186 } 5187 } 5188 5189 link_next = NULL_TREE; 5190 for (link = ASM_INPUTS (expr); link; ++i, link = link_next) 5191 { 5192 link_next = TREE_CHAIN (link); 5193 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link))); 5194 parse_input_constraint (&constraint, 0, 0, noutputs, 0, 5195 oconstraints, &allows_mem, &allows_reg); 5196 5197 /* If we can't make copies, we can only accept memory. */ 5198 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link)))) 5199 { 5200 if (allows_mem) 5201 allows_reg = 0; 5202 else 5203 { 5204 error ("impossible constraint in %<asm%>"); 5205 error ("non-memory input %d must stay in memory", i); 5206 return GS_ERROR; 5207 } 5208 } 5209 5210 /* If the operand is a memory input, it should be an lvalue. */ 5211 if (!allows_reg && allows_mem) 5212 { 5213 tree inputv = TREE_VALUE (link); 5214 STRIP_NOPS (inputv); 5215 if (TREE_CODE (inputv) == PREDECREMENT_EXPR 5216 || TREE_CODE (inputv) == PREINCREMENT_EXPR 5217 || TREE_CODE (inputv) == POSTDECREMENT_EXPR 5218 || TREE_CODE (inputv) == POSTINCREMENT_EXPR) 5219 TREE_VALUE (link) = error_mark_node; 5220 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p, 5221 is_gimple_lvalue, fb_lvalue | fb_mayfail); 5222 if (tret != GS_ERROR) 5223 { 5224 /* Unlike output operands, memory inputs are not guaranteed 5225 to be lvalues by the FE, and while the expressions are 5226 marked addressable there, if it is e.g. a statement 5227 expression, temporaries in it might not end up being 5228 addressable. They might be already used in the IL and thus 5229 it is too late to make them addressable now though. 
*/ 5230 tree x = TREE_VALUE (link); 5231 while (handled_component_p (x)) 5232 x = TREE_OPERAND (x, 0); 5233 if (TREE_CODE (x) == MEM_REF 5234 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR) 5235 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0); 5236 if ((TREE_CODE (x) == VAR_DECL 5237 || TREE_CODE (x) == PARM_DECL 5238 || TREE_CODE (x) == RESULT_DECL) 5239 && !TREE_ADDRESSABLE (x) 5240 && is_gimple_reg (x)) 5241 { 5242 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), 5243 input_location), 0, 5244 "memory input %d is not directly addressable", 5245 i); 5246 prepare_gimple_addressable (&TREE_VALUE (link), pre_p); 5247 } 5248 } 5249 mark_addressable (TREE_VALUE (link)); 5250 if (tret == GS_ERROR) 5251 { 5252 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location), 5253 "memory input %d is not directly addressable", i); 5254 ret = tret; 5255 } 5256 } 5257 else 5258 { 5259 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p, 5260 is_gimple_asm_val, fb_rvalue); 5261 if (tret == GS_ERROR) 5262 ret = tret; 5263 } 5264 5265 TREE_CHAIN (link) = NULL_TREE; 5266 vec_safe_push (inputs, link); 5267 } 5268 5269 link_next = NULL_TREE; 5270 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next) 5271 { 5272 link_next = TREE_CHAIN (link); 5273 TREE_CHAIN (link) = NULL_TREE; 5274 vec_safe_push (clobbers, link); 5275 } 5276 5277 link_next = NULL_TREE; 5278 for (link = ASM_LABELS (expr); link; ++i, link = link_next) 5279 { 5280 link_next = TREE_CHAIN (link); 5281 TREE_CHAIN (link) = NULL_TREE; 5282 vec_safe_push (labels, link); 5283 } 5284 5285 /* Do not add ASMs with errors to the gimple IL stream. */ 5286 if (ret != GS_ERROR) 5287 { 5288 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)), 5289 inputs, outputs, clobbers, labels); 5290 5291 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr)); 5292 gimple_asm_set_input (stmt, ASM_INPUT_P (expr)); 5293 5294 gimplify_seq_add_stmt (pre_p, stmt); 5295 } 5296 5297 return ret; 5298 } 5299 5300 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding 5301 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while 5302 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we 5303 return to this function. 5304 5305 FIXME should we complexify the prequeue handling instead? Or use flags 5306 for all the cleanups and let the optimizer tighten them up? The current 5307 code seems pretty fragile; it will break on a cleanup within any 5308 non-conditional nesting. But any such nesting would be broken, anyway; 5309 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct 5310 and continues out of it. We can do that at the RTL level, though, so 5311 having an optimizer to tighten up try/finally regions would be a Good 5312 Thing. */ 5313 5314 static enum gimplify_status 5315 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p) 5316 { 5317 gimple_stmt_iterator iter; 5318 gimple_seq body_sequence = NULL; 5319 5320 tree temp = voidify_wrapper_expr (*expr_p, NULL); 5321 5322 /* We only care about the number of conditions between the innermost 5323 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and 5324 any cleanups collected outside the CLEANUP_POINT_EXPR. 
*/ 5325 int old_conds = gimplify_ctxp->conditions; 5326 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups; 5327 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr; 5328 gimplify_ctxp->conditions = 0; 5329 gimplify_ctxp->conditional_cleanups = NULL; 5330 gimplify_ctxp->in_cleanup_point_expr = true; 5331 5332 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence); 5333 5334 gimplify_ctxp->conditions = old_conds; 5335 gimplify_ctxp->conditional_cleanups = old_cleanups; 5336 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr; 5337 5338 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); ) 5339 { 5340 gimple wce = gsi_stmt (iter); 5341 5342 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR) 5343 { 5344 if (gsi_one_before_end_p (iter)) 5345 { 5346 /* Note that gsi_insert_seq_before and gsi_remove do not 5347 scan operands, unlike some other sequence mutators. */ 5348 if (!gimple_wce_cleanup_eh_only (wce)) 5349 gsi_insert_seq_before_without_update (&iter, 5350 gimple_wce_cleanup (wce), 5351 GSI_SAME_STMT); 5352 gsi_remove (&iter, true); 5353 break; 5354 } 5355 else 5356 { 5357 gtry *gtry; 5358 gimple_seq seq; 5359 enum gimple_try_flags kind; 5360 5361 if (gimple_wce_cleanup_eh_only (wce)) 5362 kind = GIMPLE_TRY_CATCH; 5363 else 5364 kind = GIMPLE_TRY_FINALLY; 5365 seq = gsi_split_seq_after (iter); 5366 5367 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind); 5368 /* Do not use gsi_replace here, as it may scan operands. 5369 We want to do a simple structural modification only. */ 5370 gsi_set_stmt (&iter, gtry); 5371 iter = gsi_start (gtry->eval); 5372 } 5373 } 5374 else 5375 gsi_next (&iter); 5376 } 5377 5378 gimplify_seq_add_seq (pre_p, body_sequence); 5379 if (temp) 5380 { 5381 *expr_p = temp; 5382 return GS_OK; 5383 } 5384 else 5385 { 5386 *expr_p = NULL; 5387 return GS_ALL_DONE; 5388 } 5389 } 5390 5391 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP 5392 is the cleanup action required. EH_ONLY is true if the cleanup should 5393 only be executed if an exception is thrown, not on normal exit. */ 5394 5395 static void 5396 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p) 5397 { 5398 gimple wce; 5399 gimple_seq cleanup_stmts = NULL; 5400 5401 /* Errors can result in improperly nested cleanups, which results in 5402 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */ 5403 if (seen_error ()) 5404 return; 5405 5406 if (gimple_conditional_context ()) 5407 { 5408 /* If we're in a conditional context, this is more complex. We only 5409 want to run the cleanup if we actually ran the initialization that 5410 necessitates it, but we want to run it after the end of the 5411 conditional context. So we wrap the try/finally around the 5412 condition and use a flag to determine whether or not to actually 5413 run the destructor. Thus 5414 5415 test ?
f(A()) : 0 5416 5417 becomes (approximately) 5418 5419 flag = 0; 5420 try { 5421 if (test) { A::A(temp); flag = 1; val = f(temp); } 5422 else { val = 0; } 5423 } finally { 5424 if (flag) A::~A(temp); 5425 } 5426 val 5427 */ 5428 tree flag = create_tmp_var (boolean_type_node, "cleanup"); 5429 gassign *ffalse = gimple_build_assign (flag, boolean_false_node); 5430 gassign *ftrue = gimple_build_assign (flag, boolean_true_node); 5431 5432 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL); 5433 gimplify_stmt (&cleanup, &cleanup_stmts); 5434 wce = gimple_build_wce (cleanup_stmts); 5435 5436 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse); 5437 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce); 5438 gimplify_seq_add_stmt (pre_p, ftrue); 5439 5440 /* Because of this manipulation, and the EH edges that jump 5441 threading cannot redirect, the temporary (VAR) will appear 5442 to be used uninitialized. Don't warn. */ 5443 TREE_NO_WARNING (var) = 1; 5444 } 5445 else 5446 { 5447 gimplify_stmt (&cleanup, &cleanup_stmts); 5448 wce = gimple_build_wce (cleanup_stmts); 5449 gimple_wce_set_cleanup_eh_only (wce, eh_only); 5450 gimplify_seq_add_stmt (pre_p, wce); 5451 } 5452 } 5453 5454 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */ 5455 5456 static enum gimplify_status 5457 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p) 5458 { 5459 tree targ = *expr_p; 5460 tree temp = TARGET_EXPR_SLOT (targ); 5461 tree init = TARGET_EXPR_INITIAL (targ); 5462 enum gimplify_status ret; 5463 5464 if (init) 5465 { 5466 tree cleanup = NULL_TREE; 5467 5468 /* TARGET_EXPR temps aren't part of the enclosing block, so add it 5469 to the temps list. Handle also variable length TARGET_EXPRs. */ 5470 if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST) 5471 { 5472 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp))) 5473 gimplify_type_sizes (TREE_TYPE (temp), pre_p); 5474 gimplify_vla_decl (temp, pre_p); 5475 } 5476 else 5477 gimple_add_tmp_var (temp); 5478 5479 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the 5480 expression is supposed to initialize the slot. */ 5481 if (VOID_TYPE_P (TREE_TYPE (init))) 5482 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none); 5483 else 5484 { 5485 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init); 5486 init = init_expr; 5487 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none); 5488 init = NULL; 5489 ggc_free (init_expr); 5490 } 5491 if (ret == GS_ERROR) 5492 { 5493 /* PR c++/28266 Make sure this is expanded only once. */ 5494 TARGET_EXPR_INITIAL (targ) = NULL_TREE; 5495 return GS_ERROR; 5496 } 5497 if (init) 5498 gimplify_and_add (init, pre_p); 5499 5500 /* If needed, push the cleanup for the temp. */ 5501 if (TARGET_EXPR_CLEANUP (targ)) 5502 { 5503 if (CLEANUP_EH_ONLY (targ)) 5504 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ), 5505 CLEANUP_EH_ONLY (targ), pre_p); 5506 else 5507 cleanup = TARGET_EXPR_CLEANUP (targ); 5508 } 5509 5510 /* Add a clobber for the temporary going out of scope, like 5511 gimplify_bind_expr. 
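   In GIMPLE dumps the statement built below shows up roughly as

     temp = {CLOBBER};

   i.e. an assignment of an empty volatile CONSTRUCTOR that marks the
   slot as dead so later passes may reuse its stack storage.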
*/ 5512 if (gimplify_ctxp->in_cleanup_point_expr 5513 && needs_to_live_in_memory (temp) 5514 && flag_stack_reuse == SR_ALL) 5515 { 5516 tree clobber = build_constructor (TREE_TYPE (temp), 5517 NULL); 5518 TREE_THIS_VOLATILE (clobber) = true; 5519 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber); 5520 if (cleanup) 5521 cleanup = build2 (COMPOUND_EXPR, void_type_node, cleanup, 5522 clobber); 5523 else 5524 cleanup = clobber; 5525 } 5526 5527 if (cleanup) 5528 gimple_push_cleanup (temp, cleanup, false, pre_p); 5529 5530 /* Only expand this once. */ 5531 TREE_OPERAND (targ, 3) = init; 5532 TARGET_EXPR_INITIAL (targ) = NULL_TREE; 5533 } 5534 else 5535 /* We should have expanded this before. */ 5536 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp)); 5537 5538 *expr_p = temp; 5539 return GS_OK; 5540 } 5541 5542 /* Gimplification of expression trees. */ 5543 5544 /* Gimplify an expression which appears in statement context. The 5545 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is 5546 NULL, a new sequence is allocated. 5547 5548 Return true if we actually added a statement to the queue. */ 5549 5550 bool 5551 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p) 5552 { 5553 gimple_seq_node last; 5554 5555 last = gimple_seq_last (*seq_p); 5556 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none); 5557 return last != gimple_seq_last (*seq_p); 5558 } 5559 5560 /* Add FIRSTPRIVATE entries for DECL to CTX and the surrounding OpenMP 5561 parallels. If entries already exist, force them to be some flavor of 5562 private. If there is no enclosing parallel, do nothing. */ 5563 5564 void 5565 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl) 5566 { 5567 splay_tree_node n; 5568 5569 if (decl == NULL || !DECL_P (decl)) 5570 return; 5571 5572 do 5573 { 5574 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl); 5575 if (n != NULL) 5576 { 5577 if (n->value & GOVD_SHARED) 5578 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN); 5579 else if (n->value & GOVD_MAP) 5580 n->value |= GOVD_MAP_TO_ONLY; 5581 else 5582 return; 5583 } 5584 else if (ctx->region_type == ORT_TARGET) 5585 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY); 5586 else if (ctx->region_type != ORT_WORKSHARE 5587 && ctx->region_type != ORT_SIMD 5588 && ctx->region_type != ORT_TARGET_DATA) 5589 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE); 5590 5591 ctx = ctx->outer_context; 5592 } 5593 while (ctx); 5594 } 5595 5596 /* Similarly for each of the type sizes of TYPE.
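   For a variable-length type, e.g. that of

     int a[n];		(an illustrative case)

   TYPE_SIZE, TYPE_SIZE_UNIT and the array domain are expressions
   referring to the temporaries n was gimplified into, and each of
   those must be firstprivatized in turn; the switch below walks the
   size subexpressions appropriate to each tree code.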
*/ 5597 5598 static void 5599 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type) 5600 { 5601 if (type == NULL || type == error_mark_node) 5602 return; 5603 type = TYPE_MAIN_VARIANT (type); 5604 5605 if (ctx->privatized_types->add (type)) 5606 return; 5607 5608 switch (TREE_CODE (type)) 5609 { 5610 case INTEGER_TYPE: 5611 case ENUMERAL_TYPE: 5612 case BOOLEAN_TYPE: 5613 case REAL_TYPE: 5614 case FIXED_POINT_TYPE: 5615 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type)); 5616 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type)); 5617 break; 5618 5619 case ARRAY_TYPE: 5620 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type)); 5621 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type)); 5622 break; 5623 5624 case RECORD_TYPE: 5625 case UNION_TYPE: 5626 case QUAL_UNION_TYPE: 5627 { 5628 tree field; 5629 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field)) 5630 if (TREE_CODE (field) == FIELD_DECL) 5631 { 5632 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field)); 5633 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field)); 5634 } 5635 } 5636 break; 5637 5638 case POINTER_TYPE: 5639 case REFERENCE_TYPE: 5640 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type)); 5641 break; 5642 5643 default: 5644 break; 5645 } 5646 5647 omp_firstprivatize_variable (ctx, TYPE_SIZE (type)); 5648 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type)); 5649 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type); 5650 } 5651 5652 /* Add an entry for DECL in the OMP context CTX with FLAGS. */ 5653 5654 static void 5655 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags) 5656 { 5657 splay_tree_node n; 5658 unsigned int nflags; 5659 tree t; 5660 5661 if (error_operand_p (decl)) 5662 return; 5663 5664 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means 5665 there are constructors involved somewhere. */ 5666 if (TREE_ADDRESSABLE (TREE_TYPE (decl)) 5667 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))) 5668 flags |= GOVD_SEEN; 5669 5670 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl); 5671 if (n != NULL && n->value != GOVD_ALIGNED) 5672 { 5673 /* We shouldn't be re-adding the decl with the same data 5674 sharing class. */ 5675 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0); 5676 /* The only combination of data sharing classes we should see is 5677 FIRSTPRIVATE and LASTPRIVATE. */ 5678 nflags = n->value | flags; 5679 gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS) 5680 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE) 5681 || (flags & GOVD_DATA_SHARE_CLASS) == 0); 5682 n->value = nflags; 5683 return; 5684 } 5685 5686 /* When adding a variable-sized variable, we have to handle all sorts 5687 of additional bits of data: the pointer replacement variable, and 5688 the parameters of the type. */ 5689 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST) 5690 { 5691 /* Add the pointer replacement variable as PRIVATE if the variable 5692 replacement is private, else FIRSTPRIVATE since we'll need the 5693 address of the original variable either for SHARED, or for the 5694 copy into or out of the context. 
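   (For such decls the front ends set DECL_VALUE_EXPR to an
   INDIRECT_REF of a pointer temporary, which is what the assertions
   below expect; that pointer is the decl actually registered here.)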
*/ 5695 if (!(flags & GOVD_LOCAL)) 5696 { 5697 if (flags & GOVD_MAP) 5698 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT; 5699 else if (flags & GOVD_PRIVATE) 5700 nflags = GOVD_PRIVATE; 5701 else 5702 nflags = GOVD_FIRSTPRIVATE; 5703 nflags |= flags & GOVD_SEEN; 5704 t = DECL_VALUE_EXPR (decl); 5705 gcc_assert (TREE_CODE (t) == INDIRECT_REF); 5706 t = TREE_OPERAND (t, 0); 5707 gcc_assert (DECL_P (t)); 5708 omp_add_variable (ctx, t, nflags); 5709 } 5710 5711 /* Add all of the variable and type parameters (which should have 5712 been gimplified to a formal temporary) as FIRSTPRIVATE. */ 5713 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl)); 5714 omp_firstprivatize_variable (ctx, DECL_SIZE (decl)); 5715 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl)); 5716 5717 /* The variable-sized variable itself is never SHARED, only some form 5718 of PRIVATE. The sharing would take place via the pointer variable 5719 which we remapped above. */ 5720 if (flags & GOVD_SHARED) 5721 flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE 5722 | (flags & (GOVD_SEEN | GOVD_EXPLICIT)); 5723 5724 /* We're going to make use of the TYPE_SIZE_UNIT at least in the 5725 alloca statement we generate for the variable, so make sure it 5726 is available. This isn't automatically needed for the SHARED 5727 case, since we won't be allocating local storage then. 5728 For local variables TYPE_SIZE_UNIT might not be gimplified yet; 5729 in that case omp_notice_variable will be called later, 5730 when it is gimplified. */ 5731 else if (! (flags & (GOVD_LOCAL | GOVD_MAP)) 5732 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl)))) 5733 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true); 5734 } 5735 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0 5736 && lang_hooks.decls.omp_privatize_by_reference (decl)) 5737 { 5738 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl)); 5739 5740 /* As in the directly variable-sized case above, we'll need the 5741 size of references being privatized. */ 5742 if ((flags & GOVD_SHARED) == 0) 5743 { 5744 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))); 5745 if (TREE_CODE (t) != INTEGER_CST) 5746 omp_notice_variable (ctx, t, true); 5747 } 5748 } 5749 5750 if (n != NULL) 5751 n->value |= flags; 5752 else 5753 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags); 5754 } 5755 5756 /* Notice a threadprivate variable DECL used in OMP context CTX. 5757 This emits diagnostics about uses of threadprivate variables in 5758 target regions and in untied tasks. If DECL2 is non-NULL, suppress 5759 the diagnostic for that variable.
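   An illustrative fragment of the kind of source diagnosed here:

     int t;
     #pragma omp threadprivate (t)
     #pragma omp task untied
       t++;		<-- "threadprivate variable used in untied task"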
*/ 5760 5761 static bool 5762 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl, 5763 tree decl2) 5764 { 5765 splay_tree_node n; 5766 struct gimplify_omp_ctx *octx; 5767 5768 for (octx = ctx; octx; octx = octx->outer_context) 5769 if (octx->region_type == ORT_TARGET) 5770 { 5771 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl); 5772 if (n == NULL) 5773 { 5774 error ("threadprivate variable %qE used in target region", 5775 DECL_NAME (decl)); 5776 error_at (octx->location, "enclosing target region"); 5777 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0); 5778 } 5779 if (decl2) 5780 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0); 5781 } 5782 5783 if (ctx->region_type != ORT_UNTIED_TASK) 5784 return false; 5785 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl); 5786 if (n == NULL) 5787 { 5788 error ("threadprivate variable %qE used in untied task", 5789 DECL_NAME (decl)); 5790 error_at (ctx->location, "enclosing task"); 5791 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0); 5792 } 5793 if (decl2) 5794 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0); 5795 return false; 5796 } 5797 5798 /* Record the fact that DECL was used within the OMP context CTX. 5799 IN_CODE is true when real code uses DECL, and false when we should 5800 merely emit default(none) errors. Return true if DECL is going to 5801 be remapped and thus DECL shouldn't be gimplified into its 5802 DECL_VALUE_EXPR (if any). */ 5803 5804 static bool 5805 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code) 5806 { 5807 splay_tree_node n; 5808 unsigned flags = in_code ? GOVD_SEEN : 0; 5809 bool ret = false, shared; 5810 5811 if (error_operand_p (decl)) 5812 return false; 5813 5814 /* Threadprivate variables are predetermined. */ 5815 if (is_global_var (decl)) 5816 { 5817 if (DECL_THREAD_LOCAL_P (decl)) 5818 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE); 5819 5820 if (DECL_HAS_VALUE_EXPR_P (decl)) 5821 { 5822 tree value = get_base_address (DECL_VALUE_EXPR (decl)); 5823 5824 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value)) 5825 return omp_notice_threadprivate_variable (ctx, decl, value); 5826 } 5827 } 5828 5829 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl); 5830 if (ctx->region_type == ORT_TARGET) 5831 { 5832 ret = lang_hooks.decls.omp_disregard_value_expr (decl, true); 5833 if (n == NULL) 5834 { 5835 if (!lang_hooks.types.omp_mappable_type (TREE_TYPE (decl))) 5836 { 5837 error ("%qD referenced in target region does not have " 5838 "a mappable type", decl); 5839 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_EXPLICIT | flags); 5840 } 5841 else 5842 omp_add_variable (ctx, decl, GOVD_MAP | flags); 5843 } 5844 else 5845 { 5846 /* If nothing changed, there's nothing left to do. */ 5847 if ((n->value & flags) == flags) 5848 return ret; 5849 n->value |= flags; 5850 } 5851 goto do_outer; 5852 } 5853 5854 if (n == NULL) 5855 { 5856 enum omp_clause_default_kind default_kind, kind; 5857 struct gimplify_omp_ctx *octx; 5858 5859 if (ctx->region_type == ORT_WORKSHARE 5860 || ctx->region_type == ORT_SIMD 5861 || ctx->region_type == ORT_TARGET_DATA) 5862 goto do_outer; 5863 5864 /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be 5865 remapped firstprivate instead of shared. To some extent this is 5866 addressed in omp_firstprivatize_type_sizes, but not effectively. 
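   Failing a predetermined or explicit classification, the code below
   falls back to the effective default clause: ctx->default_kind,
   possibly overridden by whatever sharing the front end predetermines
   for DECL.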
*/ 5867 default_kind = ctx->default_kind; 5868 kind = lang_hooks.decls.omp_predetermined_sharing (decl); 5869 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED) 5870 default_kind = kind; 5871 5872 switch (default_kind) 5873 { 5874 case OMP_CLAUSE_DEFAULT_NONE: 5875 if ((ctx->region_type & ORT_PARALLEL) != 0) 5876 { 5877 error ("%qE not specified in enclosing parallel", 5878 DECL_NAME (lang_hooks.decls.omp_report_decl (decl))); 5879 error_at (ctx->location, "enclosing parallel"); 5880 } 5881 else if ((ctx->region_type & ORT_TASK) != 0) 5882 { 5883 error ("%qE not specified in enclosing task", 5884 DECL_NAME (lang_hooks.decls.omp_report_decl (decl))); 5885 error_at (ctx->location, "enclosing task"); 5886 } 5887 else if (ctx->region_type & ORT_TEAMS) 5888 { 5889 error ("%qE not specified in enclosing teams construct", 5890 DECL_NAME (lang_hooks.decls.omp_report_decl (decl))); 5891 error_at (ctx->location, "enclosing teams construct"); 5892 } 5893 else 5894 gcc_unreachable (); 5895 /* FALLTHRU */ 5896 case OMP_CLAUSE_DEFAULT_SHARED: 5897 flags |= GOVD_SHARED; 5898 break; 5899 case OMP_CLAUSE_DEFAULT_PRIVATE: 5900 flags |= GOVD_PRIVATE; 5901 break; 5902 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE: 5903 flags |= GOVD_FIRSTPRIVATE; 5904 break; 5905 case OMP_CLAUSE_DEFAULT_UNSPECIFIED: 5906 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */ 5907 gcc_assert ((ctx->region_type & ORT_TASK) != 0); 5908 if (ctx->outer_context) 5909 omp_notice_variable (ctx->outer_context, decl, in_code); 5910 for (octx = ctx->outer_context; octx; octx = octx->outer_context) 5911 { 5912 splay_tree_node n2; 5913 5914 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0) 5915 continue; 5916 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl); 5917 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED) 5918 { 5919 flags |= GOVD_FIRSTPRIVATE; 5920 break; 5921 } 5922 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0) 5923 break; 5924 } 5925 if (flags & GOVD_FIRSTPRIVATE) 5926 break; 5927 if (octx == NULL 5928 && (TREE_CODE (decl) == PARM_DECL 5929 || (!is_global_var (decl) 5930 && DECL_CONTEXT (decl) == current_function_decl))) 5931 { 5932 flags |= GOVD_FIRSTPRIVATE; 5933 break; 5934 } 5935 flags |= GOVD_SHARED; 5936 break; 5937 default: 5938 gcc_unreachable (); 5939 } 5940 5941 if ((flags & GOVD_PRIVATE) 5942 && lang_hooks.decls.omp_private_outer_ref (decl)) 5943 flags |= GOVD_PRIVATE_OUTER_REF; 5944 5945 omp_add_variable (ctx, decl, flags); 5946 5947 shared = (flags & GOVD_SHARED) != 0; 5948 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared); 5949 goto do_outer; 5950 } 5951 5952 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0 5953 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN 5954 && DECL_SIZE (decl) 5955 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST) 5956 { 5957 splay_tree_node n2; 5958 tree t = DECL_VALUE_EXPR (decl); 5959 gcc_assert (TREE_CODE (t) == INDIRECT_REF); 5960 t = TREE_OPERAND (t, 0); 5961 gcc_assert (DECL_P (t)); 5962 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t); 5963 n2->value |= GOVD_SEEN; 5964 } 5965 5966 shared = ((flags | n->value) & GOVD_SHARED) != 0; 5967 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared); 5968 5969 /* If nothing changed, there's nothing left to do. */ 5970 if ((n->value & flags) == flags) 5971 return ret; 5972 flags |= n->value; 5973 n->value = flags; 5974 5975 do_outer: 5976 /* If the variable is private in the current context, then we don't 5977 need to propagate anything to an outer context. 
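   The same holds for linear or lastprivate variables whose outer
   reference is suppressed (GOVD_LINEAR_LASTPRIVATE_NO_OUTER); the flag
   checks below filter those out before recursing into
   ctx->outer_context.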
*/ 5978 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF)) 5979 return ret; 5980 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER)) 5981 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER)) 5982 return ret; 5983 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE 5984 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER)) 5985 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER)) 5986 return ret; 5987 if (ctx->outer_context 5988 && omp_notice_variable (ctx->outer_context, decl, in_code)) 5989 return true; 5990 return ret; 5991 } 5992 5993 /* Verify that DECL is private within CTX. If there's specific information 5994 to the contrary in the innermost scope, generate an error. */ 5995 5996 static bool 5997 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd) 5998 { 5999 splay_tree_node n; 6000 6001 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl); 6002 if (n != NULL) 6003 { 6004 if (n->value & GOVD_SHARED) 6005 { 6006 if (ctx == gimplify_omp_ctxp) 6007 { 6008 if (simd) 6009 error ("iteration variable %qE is predetermined linear", 6010 DECL_NAME (decl)); 6011 else 6012 error ("iteration variable %qE should be private", 6013 DECL_NAME (decl)); 6014 n->value = GOVD_PRIVATE; 6015 return true; 6016 } 6017 else 6018 return false; 6019 } 6020 else if ((n->value & GOVD_EXPLICIT) != 0 6021 && (ctx == gimplify_omp_ctxp 6022 || (ctx->region_type == ORT_COMBINED_PARALLEL 6023 && gimplify_omp_ctxp->outer_context == ctx))) 6024 { 6025 if ((n->value & GOVD_FIRSTPRIVATE) != 0) 6026 error ("iteration variable %qE should not be firstprivate", 6027 DECL_NAME (decl)); 6028 else if ((n->value & GOVD_REDUCTION) != 0) 6029 error ("iteration variable %qE should not be reduction", 6030 DECL_NAME (decl)); 6031 else if (simd == 1 && (n->value & GOVD_LASTPRIVATE) != 0) 6032 error ("iteration variable %qE should not be lastprivate", 6033 DECL_NAME (decl)); 6034 else if (simd && (n->value & GOVD_PRIVATE) != 0) 6035 error ("iteration variable %qE should not be private", 6036 DECL_NAME (decl)); 6037 else if (simd == 2 && (n->value & GOVD_LINEAR) != 0) 6038 error ("iteration variable %qE is predetermined linear", 6039 DECL_NAME (decl)); 6040 } 6041 return (ctx == gimplify_omp_ctxp 6042 || (ctx->region_type == ORT_COMBINED_PARALLEL 6043 && gimplify_omp_ctxp->outer_context == ctx)); 6044 } 6045 6046 if (ctx->region_type != ORT_WORKSHARE 6047 && ctx->region_type != ORT_SIMD) 6048 return false; 6049 else if (ctx->outer_context) 6050 return omp_is_private (ctx->outer_context, decl, simd); 6051 return false; 6052 } 6053 6054 /* Return true if DECL is private within a parallel region 6055 that binds to the current construct's context or in parallel 6056 region's REDUCTION clause. */ 6057 6058 static bool 6059 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate) 6060 { 6061 splay_tree_node n; 6062 6063 do 6064 { 6065 ctx = ctx->outer_context; 6066 if (ctx == NULL) 6067 return !(is_global_var (decl) 6068 /* References might be private, but might be shared too, 6069 when checking for copyprivate, assume they might be 6070 private, otherwise assume they might be shared. 
*/ 6071 || (!copyprivate 6072 && lang_hooks.decls.omp_privatize_by_reference (decl))); 6073 6074 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0) 6075 continue; 6076 6077 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl); 6078 if (n != NULL) 6079 return (n->value & GOVD_SHARED) == 0; 6080 } 6081 while (ctx->region_type == ORT_WORKSHARE 6082 || ctx->region_type == ORT_SIMD); 6083 return false; 6084 } 6085 6086 /* Return true if the CTX is combined with distribute and thus 6087 lastprivate can't be supported. */ 6088 6089 static bool 6090 omp_no_lastprivate (struct gimplify_omp_ctx *ctx) 6091 { 6092 do 6093 { 6094 if (ctx->outer_context == NULL) 6095 return false; 6096 ctx = ctx->outer_context; 6097 switch (ctx->region_type) 6098 { 6099 case ORT_WORKSHARE: 6100 if (!ctx->combined_loop) 6101 return false; 6102 if (ctx->distribute) 6103 return true; 6104 break; 6105 case ORT_COMBINED_PARALLEL: 6106 break; 6107 case ORT_COMBINED_TEAMS: 6108 return true; 6109 default: 6110 return false; 6111 } 6112 } 6113 while (1); 6114 } 6115 6116 /* Scan the OMP clauses in *LIST_P, installing mappings into a new 6117 and previous omp contexts. */ 6118 6119 static void 6120 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p, 6121 enum omp_region_type region_type) 6122 { 6123 struct gimplify_omp_ctx *ctx, *outer_ctx; 6124 tree c; 6125 6126 ctx = new_omp_context (region_type); 6127 outer_ctx = ctx->outer_context; 6128 6129 while ((c = *list_p) != NULL) 6130 { 6131 bool remove = false; 6132 bool notice_outer = true; 6133 const char *check_non_private = NULL; 6134 unsigned int flags; 6135 tree decl; 6136 6137 switch (OMP_CLAUSE_CODE (c)) 6138 { 6139 case OMP_CLAUSE_PRIVATE: 6140 flags = GOVD_PRIVATE | GOVD_EXPLICIT; 6141 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c))) 6142 { 6143 flags |= GOVD_PRIVATE_OUTER_REF; 6144 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1; 6145 } 6146 else 6147 notice_outer = false; 6148 goto do_add; 6149 case OMP_CLAUSE_SHARED: 6150 flags = GOVD_SHARED | GOVD_EXPLICIT; 6151 goto do_add; 6152 case OMP_CLAUSE_FIRSTPRIVATE: 6153 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT; 6154 check_non_private = "firstprivate"; 6155 goto do_add; 6156 case OMP_CLAUSE_LASTPRIVATE: 6157 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT; 6158 check_non_private = "lastprivate"; 6159 decl = OMP_CLAUSE_DECL (c); 6160 if (omp_no_lastprivate (ctx)) 6161 { 6162 notice_outer = false; 6163 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER; 6164 } 6165 else if (error_operand_p (decl)) 6166 goto do_add; 6167 else if (outer_ctx 6168 && outer_ctx->region_type == ORT_COMBINED_PARALLEL 6169 && splay_tree_lookup (outer_ctx->variables, 6170 (splay_tree_key) decl) == NULL) 6171 omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN); 6172 else if (outer_ctx 6173 && outer_ctx->region_type == ORT_WORKSHARE 6174 && outer_ctx->combined_loop 6175 && splay_tree_lookup (outer_ctx->variables, 6176 (splay_tree_key) decl) == NULL 6177 && !omp_check_private (outer_ctx, decl, false)) 6178 { 6179 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN); 6180 if (outer_ctx->outer_context 6181 && (outer_ctx->outer_context->region_type 6182 == ORT_COMBINED_PARALLEL) 6183 && splay_tree_lookup (outer_ctx->outer_context->variables, 6184 (splay_tree_key) decl) == NULL) 6185 omp_add_variable (outer_ctx->outer_context, decl, 6186 GOVD_SHARED | GOVD_SEEN); 6187 } 6188 goto do_add; 6189 case OMP_CLAUSE_REDUCTION: 6190 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT; 6191 check_non_private = 
"reduction"; 6192 goto do_add; 6193 case OMP_CLAUSE_LINEAR: 6194 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL, 6195 is_gimple_val, fb_rvalue) == GS_ERROR) 6196 { 6197 remove = true; 6198 break; 6199 } 6200 else 6201 { 6202 /* For combined #pragma omp parallel for simd, need to put 6203 lastprivate and perhaps firstprivate too on the 6204 parallel. Similarly for #pragma omp for simd. */ 6205 struct gimplify_omp_ctx *octx = outer_ctx; 6206 decl = NULL_TREE; 6207 if (omp_no_lastprivate (ctx)) 6208 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1; 6209 do 6210 { 6211 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c) 6212 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c)) 6213 break; 6214 decl = OMP_CLAUSE_DECL (c); 6215 if (error_operand_p (decl)) 6216 { 6217 decl = NULL_TREE; 6218 break; 6219 } 6220 if (octx 6221 && octx->region_type == ORT_WORKSHARE 6222 && octx->combined_loop) 6223 { 6224 if (octx->outer_context 6225 && (octx->outer_context->region_type 6226 == ORT_COMBINED_PARALLEL 6227 || (octx->outer_context->region_type 6228 == ORT_COMBINED_TEAMS))) 6229 octx = octx->outer_context; 6230 else if (omp_check_private (octx, decl, false)) 6231 break; 6232 } 6233 else 6234 break; 6235 if (splay_tree_lookup (octx->variables, 6236 (splay_tree_key) decl) != NULL) 6237 { 6238 octx = NULL; 6239 break; 6240 } 6241 flags = GOVD_SEEN; 6242 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)) 6243 flags |= GOVD_FIRSTPRIVATE; 6244 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c)) 6245 flags |= GOVD_LASTPRIVATE; 6246 omp_add_variable (octx, decl, flags); 6247 if (octx->outer_context == NULL) 6248 break; 6249 octx = octx->outer_context; 6250 } 6251 while (1); 6252 if (octx 6253 && decl 6254 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c) 6255 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))) 6256 omp_notice_variable (octx, decl, true); 6257 } 6258 flags = GOVD_LINEAR | GOVD_EXPLICIT; 6259 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c) 6260 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c)) 6261 { 6262 notice_outer = false; 6263 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER; 6264 } 6265 goto do_add; 6266 6267 case OMP_CLAUSE_MAP: 6268 decl = OMP_CLAUSE_DECL (c); 6269 if (error_operand_p (decl)) 6270 { 6271 remove = true; 6272 break; 6273 } 6274 if (OMP_CLAUSE_SIZE (c) == NULL_TREE) 6275 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? 
DECL_SIZE_UNIT (decl) 6276 : TYPE_SIZE_UNIT (TREE_TYPE (decl)); 6277 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p, 6278 NULL, is_gimple_val, fb_rvalue) == GS_ERROR) 6279 { 6280 remove = true; 6281 break; 6282 } 6283 if (!DECL_P (decl)) 6284 { 6285 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, 6286 NULL, is_gimple_lvalue, fb_lvalue) 6287 == GS_ERROR) 6288 { 6289 remove = true; 6290 break; 6291 } 6292 break; 6293 } 6294 flags = GOVD_MAP | GOVD_EXPLICIT; 6295 goto do_add; 6296 6297 case OMP_CLAUSE_DEPEND: 6298 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR) 6299 { 6300 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p, 6301 NULL, is_gimple_val, fb_rvalue); 6302 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1); 6303 } 6304 if (error_operand_p (OMP_CLAUSE_DECL (c))) 6305 { 6306 remove = true; 6307 break; 6308 } 6309 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c)); 6310 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL, 6311 is_gimple_val, fb_rvalue) == GS_ERROR) 6312 { 6313 remove = true; 6314 break; 6315 } 6316 break; 6317 6318 case OMP_CLAUSE_TO: 6319 case OMP_CLAUSE_FROM: 6320 case OMP_CLAUSE__CACHE_: 6321 decl = OMP_CLAUSE_DECL (c); 6322 if (error_operand_p (decl)) 6323 { 6324 remove = true; 6325 break; 6326 } 6327 if (OMP_CLAUSE_SIZE (c) == NULL_TREE) 6328 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl) 6329 : TYPE_SIZE_UNIT (TREE_TYPE (decl)); 6330 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p, 6331 NULL, is_gimple_val, fb_rvalue) == GS_ERROR) 6332 { 6333 remove = true; 6334 break; 6335 } 6336 if (!DECL_P (decl)) 6337 { 6338 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, 6339 NULL, is_gimple_lvalue, fb_lvalue) 6340 == GS_ERROR) 6341 { 6342 remove = true; 6343 break; 6344 } 6345 break; 6346 } 6347 goto do_notice; 6348 6349 do_add: 6350 decl = OMP_CLAUSE_DECL (c); 6351 if (error_operand_p (decl)) 6352 { 6353 remove = true; 6354 break; 6355 } 6356 omp_add_variable (ctx, decl, flags); 6357 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION 6358 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)) 6359 { 6360 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c), 6361 GOVD_LOCAL | GOVD_SEEN); 6362 gimplify_omp_ctxp = ctx; 6363 push_gimplify_context (); 6364 6365 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL; 6366 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL; 6367 6368 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c), 6369 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)); 6370 pop_gimplify_context 6371 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))); 6372 push_gimplify_context (); 6373 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c), 6374 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)); 6375 pop_gimplify_context 6376 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c))); 6377 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE; 6378 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE; 6379 6380 gimplify_omp_ctxp = outer_ctx; 6381 } 6382 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE 6383 && OMP_CLAUSE_LASTPRIVATE_STMT (c)) 6384 { 6385 gimplify_omp_ctxp = ctx; 6386 push_gimplify_context (); 6387 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR) 6388 { 6389 tree bind = build3 (BIND_EXPR, void_type_node, NULL, 6390 NULL, NULL); 6391 TREE_SIDE_EFFECTS (bind) = 1; 6392 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c); 6393 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind; 6394 } 6395 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c), 6396 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)); 6397 pop_gimplify_context 6398 (gimple_seq_first_stmt 
(OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))); 6399 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE; 6400 6401 gimplify_omp_ctxp = outer_ctx; 6402 } 6403 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR 6404 && OMP_CLAUSE_LINEAR_STMT (c)) 6405 { 6406 gimplify_omp_ctxp = ctx; 6407 push_gimplify_context (); 6408 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR) 6409 { 6410 tree bind = build3 (BIND_EXPR, void_type_node, NULL, 6411 NULL, NULL); 6412 TREE_SIDE_EFFECTS (bind) = 1; 6413 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c); 6414 OMP_CLAUSE_LINEAR_STMT (c) = bind; 6415 } 6416 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c), 6417 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)); 6418 pop_gimplify_context 6419 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))); 6420 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE; 6421 6422 gimplify_omp_ctxp = outer_ctx; 6423 } 6424 if (notice_outer) 6425 goto do_notice; 6426 break; 6427 6428 case OMP_CLAUSE_COPYIN: 6429 case OMP_CLAUSE_COPYPRIVATE: 6430 decl = OMP_CLAUSE_DECL (c); 6431 if (error_operand_p (decl)) 6432 { 6433 remove = true; 6434 break; 6435 } 6436 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE 6437 && !remove 6438 && !omp_check_private (ctx, decl, true)) 6439 { 6440 remove = true; 6441 if (is_global_var (decl)) 6442 { 6443 if (DECL_THREAD_LOCAL_P (decl)) 6444 remove = false; 6445 else if (DECL_HAS_VALUE_EXPR_P (decl)) 6446 { 6447 tree value = get_base_address (DECL_VALUE_EXPR (decl)); 6448 6449 if (value 6450 && DECL_P (value) 6451 && DECL_THREAD_LOCAL_P (value)) 6452 remove = false; 6453 } 6454 } 6455 if (remove) 6456 error_at (OMP_CLAUSE_LOCATION (c), 6457 "copyprivate variable %qE is not threadprivate" 6458 " or private in outer context", DECL_NAME (decl)); 6459 } 6460 do_notice: 6461 if (outer_ctx) 6462 omp_notice_variable (outer_ctx, decl, true); 6463 if (check_non_private 6464 && region_type == ORT_WORKSHARE 6465 && omp_check_private (ctx, decl, false)) 6466 { 6467 error ("%s variable %qE is private in outer context", 6468 check_non_private, DECL_NAME (decl)); 6469 remove = true; 6470 } 6471 break; 6472 6473 case OMP_CLAUSE_FINAL: 6474 case OMP_CLAUSE_IF: 6475 OMP_CLAUSE_OPERAND (c, 0) 6476 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0)); 6477 /* Fall through. 
*/ 6478 6479 case OMP_CLAUSE_SCHEDULE: 6480 case OMP_CLAUSE_NUM_THREADS: 6481 case OMP_CLAUSE_NUM_TEAMS: 6482 case OMP_CLAUSE_THREAD_LIMIT: 6483 case OMP_CLAUSE_DIST_SCHEDULE: 6484 case OMP_CLAUSE_DEVICE: 6485 case OMP_CLAUSE__CILK_FOR_COUNT_: 6486 case OMP_CLAUSE_ASYNC: 6487 case OMP_CLAUSE_WAIT: 6488 case OMP_CLAUSE_NUM_GANGS: 6489 case OMP_CLAUSE_NUM_WORKERS: 6490 case OMP_CLAUSE_VECTOR_LENGTH: 6491 case OMP_CLAUSE_GANG: 6492 case OMP_CLAUSE_WORKER: 6493 case OMP_CLAUSE_VECTOR: 6494 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL, 6495 is_gimple_val, fb_rvalue) == GS_ERROR) 6496 remove = true; 6497 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_GANG 6498 && gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL, 6499 is_gimple_val, fb_rvalue) == GS_ERROR) 6500 remove = true; 6501 break; 6502 6503 case OMP_CLAUSE_DEVICE_RESIDENT: 6504 case OMP_CLAUSE_USE_DEVICE: 6505 case OMP_CLAUSE_INDEPENDENT: 6506 remove = true; 6507 break; 6508 6509 case OMP_CLAUSE_NOWAIT: 6510 case OMP_CLAUSE_ORDERED: 6511 case OMP_CLAUSE_UNTIED: 6512 case OMP_CLAUSE_COLLAPSE: 6513 case OMP_CLAUSE_AUTO: 6514 case OMP_CLAUSE_SEQ: 6515 case OMP_CLAUSE_MERGEABLE: 6516 case OMP_CLAUSE_PROC_BIND: 6517 case OMP_CLAUSE_SAFELEN: 6518 break; 6519 6520 case OMP_CLAUSE_ALIGNED: 6521 decl = OMP_CLAUSE_DECL (c); 6522 if (error_operand_p (decl)) 6523 { 6524 remove = true; 6525 break; 6526 } 6527 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL, 6528 is_gimple_val, fb_rvalue) == GS_ERROR) 6529 { 6530 remove = true; 6531 break; 6532 } 6533 if (!is_global_var (decl) 6534 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE) 6535 omp_add_variable (ctx, decl, GOVD_ALIGNED); 6536 break; 6537 6538 case OMP_CLAUSE_DEFAULT: 6539 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c); 6540 break; 6541 6542 default: 6543 gcc_unreachable (); 6544 } 6545 6546 if (remove) 6547 *list_p = OMP_CLAUSE_CHAIN (c); 6548 else 6549 list_p = &OMP_CLAUSE_CHAIN (c); 6550 } 6551 6552 gimplify_omp_ctxp = ctx; 6553 } 6554 6555 struct gimplify_adjust_omp_clauses_data 6556 { 6557 tree *list_p; 6558 gimple_seq *pre_p; 6559 }; 6560 6561 /* For all variables that were not actually used within the context, 6562 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. 
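   (This splay-tree callback also works in the other direction: for
   variables that were seen but carry no explicit clause, it
   synthesizes the implicitly determined PRIVATE, SHARED, FIRSTPRIVATE,
   LASTPRIVATE or MAP clause; entries flagged GOVD_EXPLICIT or
   GOVD_LOCAL, or never marked GOVD_SEEN, are skipped.)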
*/ 6563 6564 static int 6565 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data) 6566 { 6567 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p; 6568 gimple_seq *pre_p 6569 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p; 6570 tree decl = (tree) n->key; 6571 unsigned flags = n->value; 6572 enum omp_clause_code code; 6573 tree clause; 6574 bool private_debug; 6575 6576 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL)) 6577 return 0; 6578 if ((flags & GOVD_SEEN) == 0) 6579 return 0; 6580 if (flags & GOVD_DEBUG_PRIVATE) 6581 { 6582 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE); 6583 private_debug = true; 6584 } 6585 else if (flags & GOVD_MAP) 6586 private_debug = false; 6587 else 6588 private_debug 6589 = lang_hooks.decls.omp_private_debug_clause (decl, 6590 !!(flags & GOVD_SHARED)); 6591 if (private_debug) 6592 code = OMP_CLAUSE_PRIVATE; 6593 else if (flags & GOVD_MAP) 6594 code = OMP_CLAUSE_MAP; 6595 else if (flags & GOVD_SHARED) 6596 { 6597 if (is_global_var (decl)) 6598 { 6599 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context; 6600 while (ctx != NULL) 6601 { 6602 splay_tree_node on 6603 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl); 6604 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE 6605 | GOVD_PRIVATE | GOVD_REDUCTION 6606 | GOVD_LINEAR | GOVD_MAP)) != 0) 6607 break; 6608 ctx = ctx->outer_context; 6609 } 6610 if (ctx == NULL) 6611 return 0; 6612 } 6613 code = OMP_CLAUSE_SHARED; 6614 } 6615 else if (flags & GOVD_PRIVATE) 6616 code = OMP_CLAUSE_PRIVATE; 6617 else if (flags & GOVD_FIRSTPRIVATE) 6618 code = OMP_CLAUSE_FIRSTPRIVATE; 6619 else if (flags & GOVD_LASTPRIVATE) 6620 code = OMP_CLAUSE_LASTPRIVATE; 6621 else if (flags & GOVD_ALIGNED) 6622 return 0; 6623 else 6624 gcc_unreachable (); 6625 6626 clause = build_omp_clause (input_location, code); 6627 OMP_CLAUSE_DECL (clause) = decl; 6628 OMP_CLAUSE_CHAIN (clause) = *list_p; 6629 if (private_debug) 6630 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1; 6631 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF)) 6632 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1; 6633 else if (code == OMP_CLAUSE_MAP) 6634 { 6635 OMP_CLAUSE_SET_MAP_KIND (clause, 6636 flags & GOVD_MAP_TO_ONLY 6637 ? 
GOMP_MAP_TO 6638 : GOMP_MAP_TOFROM); 6639 if (DECL_SIZE (decl) 6640 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST) 6641 { 6642 tree decl2 = DECL_VALUE_EXPR (decl); 6643 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF); 6644 decl2 = TREE_OPERAND (decl2, 0); 6645 gcc_assert (DECL_P (decl2)); 6646 tree mem = build_simple_mem_ref (decl2); 6647 OMP_CLAUSE_DECL (clause) = mem; 6648 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl)); 6649 if (gimplify_omp_ctxp->outer_context) 6650 { 6651 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context; 6652 omp_notice_variable (ctx, decl2, true); 6653 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true); 6654 } 6655 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause), 6656 OMP_CLAUSE_MAP); 6657 OMP_CLAUSE_DECL (nc) = decl; 6658 OMP_CLAUSE_SIZE (nc) = size_zero_node; 6659 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER); 6660 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause); 6661 OMP_CLAUSE_CHAIN (clause) = nc; 6662 } 6663 else 6664 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl); 6665 } 6666 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0) 6667 { 6668 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE); 6669 OMP_CLAUSE_DECL (nc) = decl; 6670 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1; 6671 OMP_CLAUSE_CHAIN (nc) = *list_p; 6672 OMP_CLAUSE_CHAIN (clause) = nc; 6673 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; 6674 gimplify_omp_ctxp = ctx->outer_context; 6675 lang_hooks.decls.omp_finish_clause (nc, pre_p); 6676 gimplify_omp_ctxp = ctx; 6677 } 6678 *list_p = clause; 6679 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; 6680 gimplify_omp_ctxp = ctx->outer_context; 6681 lang_hooks.decls.omp_finish_clause (clause, pre_p); 6682 gimplify_omp_ctxp = ctx; 6683 return 0; 6684 } 6685 6686 static void 6687 gimplify_adjust_omp_clauses (gimple_seq *pre_p, tree *list_p) 6688 { 6689 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; 6690 tree c, decl; 6691 6692 while ((c = *list_p) != NULL) 6693 { 6694 splay_tree_node n; 6695 bool remove = false; 6696 6697 switch (OMP_CLAUSE_CODE (c)) 6698 { 6699 case OMP_CLAUSE_PRIVATE: 6700 case OMP_CLAUSE_SHARED: 6701 case OMP_CLAUSE_FIRSTPRIVATE: 6702 case OMP_CLAUSE_LINEAR: 6703 decl = OMP_CLAUSE_DECL (c); 6704 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl); 6705 remove = !(n->value & GOVD_SEEN); 6706 if (! remove) 6707 { 6708 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED; 6709 if ((n->value & GOVD_DEBUG_PRIVATE) 6710 || lang_hooks.decls.omp_private_debug_clause (decl, shared)) 6711 { 6712 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0 6713 || ((n->value & GOVD_DATA_SHARE_CLASS) 6714 == GOVD_PRIVATE)); 6715 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE); 6716 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1; 6717 } 6718 } 6719 break; 6720 6721 case OMP_CLAUSE_LASTPRIVATE: 6722 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to 6723 accurately reflect the presence of a FIRSTPRIVATE clause. 
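   An illustrative case: given

     #pragma omp for firstprivate (x) lastprivate (x)

   x is recorded with both GOVD_FIRSTPRIVATE and GOVD_LASTPRIVATE, and
   the flag set below lets the later OMP lowering share one private
   copy between the two clauses.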
*/ 6724 decl = OMP_CLAUSE_DECL (c); 6725 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl); 6726 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) 6727 = (n->value & GOVD_FIRSTPRIVATE) != 0; 6728 if (omp_no_lastprivate (ctx)) 6729 { 6730 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)) 6731 remove = true; 6732 else 6733 OMP_CLAUSE_CODE (c) = OMP_CLAUSE_PRIVATE; 6734 } 6735 break; 6736 6737 case OMP_CLAUSE_ALIGNED: 6738 decl = OMP_CLAUSE_DECL (c); 6739 if (!is_global_var (decl)) 6740 { 6741 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl); 6742 remove = n == NULL || !(n->value & GOVD_SEEN); 6743 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE) 6744 { 6745 struct gimplify_omp_ctx *octx; 6746 if (n != NULL 6747 && (n->value & (GOVD_DATA_SHARE_CLASS 6748 & ~GOVD_FIRSTPRIVATE))) 6749 remove = true; 6750 else 6751 for (octx = ctx->outer_context; octx; 6752 octx = octx->outer_context) 6753 { 6754 n = splay_tree_lookup (octx->variables, 6755 (splay_tree_key) decl); 6756 if (n == NULL) 6757 continue; 6758 if (n->value & GOVD_LOCAL) 6759 break; 6760 /* We have to avoid assigning a shared variable 6761 to itself when trying to add 6762 __builtin_assume_aligned. */ 6763 if (n->value & GOVD_SHARED) 6764 { 6765 remove = true; 6766 break; 6767 } 6768 } 6769 } 6770 } 6771 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE) 6772 { 6773 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl); 6774 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0) 6775 remove = true; 6776 } 6777 break; 6778 6779 case OMP_CLAUSE_MAP: 6780 decl = OMP_CLAUSE_DECL (c); 6781 if (!DECL_P (decl)) 6782 break; 6783 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl); 6784 if (ctx->region_type == ORT_TARGET && !(n->value & GOVD_SEEN)) 6785 remove = true; 6786 else if (DECL_SIZE (decl) 6787 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST 6788 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER) 6789 { 6790 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because 6791 for these, TREE_CODE (DECL_SIZE (decl)) will always be 6792 INTEGER_CST. 
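   What follows remaps such a variable-sized decl through its
   DECL_VALUE_EXPR: the clause is rewritten to map the pointed-to
   storage (a MEM_REF of the pointer replacement), and a zero-sized
   GOMP_MAP_POINTER clause for the pointer itself is chained after it.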
*/ 6793 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR); 6794 6795 tree decl2 = DECL_VALUE_EXPR (decl); 6796 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF); 6797 decl2 = TREE_OPERAND (decl2, 0); 6798 gcc_assert (DECL_P (decl2)); 6799 tree mem = build_simple_mem_ref (decl2); 6800 OMP_CLAUSE_DECL (c) = mem; 6801 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl)); 6802 if (ctx->outer_context) 6803 { 6804 omp_notice_variable (ctx->outer_context, decl2, true); 6805 omp_notice_variable (ctx->outer_context, 6806 OMP_CLAUSE_SIZE (c), true); 6807 } 6808 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c), 6809 OMP_CLAUSE_MAP); 6810 OMP_CLAUSE_DECL (nc) = decl; 6811 OMP_CLAUSE_SIZE (nc) = size_zero_node; 6812 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER); 6813 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c); 6814 OMP_CLAUSE_CHAIN (c) = nc; 6815 c = nc; 6816 } 6817 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE) 6818 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl); 6819 break; 6820 6821 case OMP_CLAUSE_TO: 6822 case OMP_CLAUSE_FROM: 6823 case OMP_CLAUSE__CACHE_: 6824 decl = OMP_CLAUSE_DECL (c); 6825 if (!DECL_P (decl)) 6826 break; 6827 if (DECL_SIZE (decl) 6828 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST) 6829 { 6830 tree decl2 = DECL_VALUE_EXPR (decl); 6831 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF); 6832 decl2 = TREE_OPERAND (decl2, 0); 6833 gcc_assert (DECL_P (decl2)); 6834 tree mem = build_simple_mem_ref (decl2); 6835 OMP_CLAUSE_DECL (c) = mem; 6836 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl)); 6837 if (ctx->outer_context) 6838 { 6839 omp_notice_variable (ctx->outer_context, decl2, true); 6840 omp_notice_variable (ctx->outer_context, 6841 OMP_CLAUSE_SIZE (c), true); 6842 } 6843 } 6844 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE) 6845 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl); 6846 break; 6847 6848 case OMP_CLAUSE_REDUCTION: 6849 case OMP_CLAUSE_COPYIN: 6850 case OMP_CLAUSE_COPYPRIVATE: 6851 case OMP_CLAUSE_IF: 6852 case OMP_CLAUSE_NUM_THREADS: 6853 case OMP_CLAUSE_NUM_TEAMS: 6854 case OMP_CLAUSE_THREAD_LIMIT: 6855 case OMP_CLAUSE_DIST_SCHEDULE: 6856 case OMP_CLAUSE_DEVICE: 6857 case OMP_CLAUSE_SCHEDULE: 6858 case OMP_CLAUSE_NOWAIT: 6859 case OMP_CLAUSE_ORDERED: 6860 case OMP_CLAUSE_DEFAULT: 6861 case OMP_CLAUSE_UNTIED: 6862 case OMP_CLAUSE_COLLAPSE: 6863 case OMP_CLAUSE_FINAL: 6864 case OMP_CLAUSE_MERGEABLE: 6865 case OMP_CLAUSE_PROC_BIND: 6866 case OMP_CLAUSE_SAFELEN: 6867 case OMP_CLAUSE_DEPEND: 6868 case OMP_CLAUSE__CILK_FOR_COUNT_: 6869 case OMP_CLAUSE_ASYNC: 6870 case OMP_CLAUSE_WAIT: 6871 case OMP_CLAUSE_DEVICE_RESIDENT: 6872 case OMP_CLAUSE_USE_DEVICE: 6873 case OMP_CLAUSE_INDEPENDENT: 6874 case OMP_CLAUSE_NUM_GANGS: 6875 case OMP_CLAUSE_NUM_WORKERS: 6876 case OMP_CLAUSE_VECTOR_LENGTH: 6877 case OMP_CLAUSE_GANG: 6878 case OMP_CLAUSE_WORKER: 6879 case OMP_CLAUSE_VECTOR: 6880 case OMP_CLAUSE_AUTO: 6881 case OMP_CLAUSE_SEQ: 6882 break; 6883 6884 default: 6885 gcc_unreachable (); 6886 } 6887 6888 if (remove) 6889 *list_p = OMP_CLAUSE_CHAIN (c); 6890 else 6891 list_p = &OMP_CLAUSE_CHAIN (c); 6892 } 6893 6894 /* Add in any implicit data sharing. */ 6895 struct gimplify_adjust_omp_clauses_data data; 6896 data.list_p = list_p; 6897 data.pre_p = pre_p; 6898 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data); 6899 6900 gimplify_omp_ctxp = ctx->outer_context; 6901 delete_omp_context (ctx); 6902 } 6903 6904 /* Gimplify OACC_CACHE. 
*/ 6905 6906 static void 6907 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p) 6908 { 6909 tree expr = *expr_p; 6910 6911 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_WORKSHARE); 6912 gimplify_adjust_omp_clauses (pre_p, &OACC_CACHE_CLAUSES (expr)); 6913 6914 /* TODO: Do something sensible with this information. */ 6915 6916 *expr_p = NULL_TREE; 6917 } 6918 6919 /* Gimplify the contents of an OMP_PARALLEL statement. This involves 6920 gimplification of the body, as well as scanning the body for used 6921 variables. We need to do this scan now, because variable-sized 6922 decls will be decomposed during gimplification. */ 6923 6924 static void 6925 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p) 6926 { 6927 tree expr = *expr_p; 6928 gimple g; 6929 gimple_seq body = NULL; 6930 6931 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p, 6932 OMP_PARALLEL_COMBINED (expr) 6933 ? ORT_COMBINED_PARALLEL 6934 : ORT_PARALLEL); 6935 6936 push_gimplify_context (); 6937 6938 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body); 6939 if (gimple_code (g) == GIMPLE_BIND) 6940 pop_gimplify_context (g); 6941 else 6942 pop_gimplify_context (NULL); 6943 6944 gimplify_adjust_omp_clauses (pre_p, &OMP_PARALLEL_CLAUSES (expr)); 6945 6946 g = gimple_build_omp_parallel (body, 6947 OMP_PARALLEL_CLAUSES (expr), 6948 NULL_TREE, NULL_TREE); 6949 if (OMP_PARALLEL_COMBINED (expr)) 6950 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED); 6951 gimplify_seq_add_stmt (pre_p, g); 6952 *expr_p = NULL_TREE; 6953 } 6954 6955 /* Gimplify the contents of an OMP_TASK statement. This involves 6956 gimplification of the body, as well as scanning the body for used 6957 variables. We need to do this scan now, because variable-sized 6958 decls will be decomposed during gimplification. */ 6959 6960 static void 6961 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p) 6962 { 6963 tree expr = *expr_p; 6964 gimple g; 6965 gimple_seq body = NULL; 6966 6967 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p, 6968 find_omp_clause (OMP_TASK_CLAUSES (expr), 6969 OMP_CLAUSE_UNTIED) 6970 ? ORT_UNTIED_TASK : ORT_TASK); 6971 6972 push_gimplify_context (); 6973 6974 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body); 6975 if (gimple_code (g) == GIMPLE_BIND) 6976 pop_gimplify_context (g); 6977 else 6978 pop_gimplify_context (NULL); 6979 6980 gimplify_adjust_omp_clauses (pre_p, &OMP_TASK_CLAUSES (expr)); 6981 6982 g = gimple_build_omp_task (body, 6983 OMP_TASK_CLAUSES (expr), 6984 NULL_TREE, NULL_TREE, 6985 NULL_TREE, NULL_TREE, NULL_TREE); 6986 gimplify_seq_add_stmt (pre_p, g); 6987 *expr_p = NULL_TREE; 6988 } 6989 6990 /* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD 6991 with non-NULL OMP_FOR_INIT. */ 6992 6993 static tree 6994 find_combined_omp_for (tree *tp, int *walk_subtrees, void *) 6995 { 6996 *walk_subtrees = 0; 6997 switch (TREE_CODE (*tp)) 6998 { 6999 case OMP_FOR: 7000 *walk_subtrees = 1; 7001 /* FALLTHRU */ 7002 case OMP_SIMD: 7003 if (OMP_FOR_INIT (*tp) != NULL_TREE) 7004 return *tp; 7005 break; 7006 case BIND_EXPR: 7007 case STATEMENT_LIST: 7008 case OMP_PARALLEL: 7009 *walk_subtrees = 1; 7010 break; 7011 default: 7012 break; 7013 } 7014 return NULL_TREE; 7015 } 7016 7017 /* Gimplify the gross structure of an OMP_FOR statement. 
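   In outline: determine whether this is a simd-flavored loop, locate
   the innermost OMP_FOR/OMP_SIMD of combined constructs, privatize
   each iteration variable appropriately, gimplify the init, cond and
   incr expressions into for_pre_body, and finally build and emit the
   GIMPLE_OMP_FOR statement itself.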
*/ 7018 7019 static enum gimplify_status 7020 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p) 7021 { 7022 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t; 7023 enum gimplify_status ret = GS_ALL_DONE; 7024 enum gimplify_status tret; 7025 gomp_for *gfor; 7026 gimple_seq for_body, for_pre_body; 7027 int i; 7028 bool simd; 7029 bitmap has_decl_expr = NULL; 7030 7031 orig_for_stmt = for_stmt = *expr_p; 7032 7033 switch (TREE_CODE (for_stmt)) 7034 { 7035 case OMP_FOR: 7036 case CILK_FOR: 7037 case OMP_DISTRIBUTE: 7038 case OACC_LOOP: 7039 simd = false; 7040 break; 7041 case OMP_SIMD: 7042 case CILK_SIMD: 7043 simd = true; 7044 break; 7045 default: 7046 gcc_unreachable (); 7047 } 7048 7049 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear 7050 clause for the IV. */ 7051 if (simd && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1) 7052 { 7053 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0); 7054 gcc_assert (TREE_CODE (t) == MODIFY_EXPR); 7055 decl = TREE_OPERAND (t, 0); 7056 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c)) 7057 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR 7058 && OMP_CLAUSE_DECL (c) == decl) 7059 { 7060 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1; 7061 break; 7062 } 7063 } 7064 7065 if (OMP_FOR_INIT (for_stmt) == NULL_TREE) 7066 { 7067 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP); 7068 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt), 7069 find_combined_omp_for, NULL, NULL); 7070 if (inner_for_stmt == NULL_TREE) 7071 { 7072 gcc_assert (seen_error ()); 7073 *expr_p = NULL_TREE; 7074 return GS_ERROR; 7075 } 7076 } 7077 7078 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, 7079 simd ? ORT_SIMD : ORT_WORKSHARE); 7080 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE) 7081 gimplify_omp_ctxp->distribute = true; 7082 7083 /* Handle OMP_FOR_INIT. */ 7084 for_pre_body = NULL; 7085 if (simd && OMP_FOR_PRE_BODY (for_stmt)) 7086 { 7087 has_decl_expr = BITMAP_ALLOC (NULL); 7088 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR 7089 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt))) 7090 == VAR_DECL) 7091 { 7092 t = OMP_FOR_PRE_BODY (for_stmt); 7093 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t))); 7094 } 7095 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST) 7096 { 7097 tree_stmt_iterator si; 7098 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si); 7099 tsi_next (&si)) 7100 { 7101 t = tsi_stmt (si); 7102 if (TREE_CODE (t) == DECL_EXPR 7103 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL) 7104 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t))); 7105 } 7106 } 7107 } 7108 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body); 7109 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE; 7110 7111 if (OMP_FOR_INIT (for_stmt) == NULL_TREE) 7112 { 7113 for_stmt = inner_for_stmt; 7114 gimplify_omp_ctxp->combined_loop = true; 7115 } 7116 7117 for_body = NULL; 7118 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) 7119 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt))); 7120 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) 7121 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt))); 7122 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++) 7123 { 7124 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i); 7125 gcc_assert (TREE_CODE (t) == MODIFY_EXPR); 7126 decl = TREE_OPERAND (t, 0); 7127 gcc_assert (DECL_P (decl)); 7128 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl)) 7129 || POINTER_TYPE_P (TREE_TYPE (decl))); 7130 7131 /* Make sure the iteration variable is private. 
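   For a simd loop the variable is added as an explicit linear clause
   with OMP_CLAUSE_LINEAR_NO_COPYIN set; for other worksharing loops it
   becomes private, or lastprivate when it was not declared within the
   construct itself, as decided below.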
*/ 7132 tree c = NULL_TREE; 7133 tree c2 = NULL_TREE; 7134 if (orig_for_stmt != for_stmt) 7135 /* Do this only on innermost construct for combined ones. */; 7136 else if (simd) 7137 { 7138 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables, 7139 (splay_tree_key)decl); 7140 omp_is_private (gimplify_omp_ctxp, decl, 7141 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) 7142 != 1)); 7143 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0) 7144 omp_notice_variable (gimplify_omp_ctxp, decl, true); 7145 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1) 7146 { 7147 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR); 7148 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1; 7149 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN; 7150 if ((has_decl_expr 7151 && bitmap_bit_p (has_decl_expr, DECL_UID (decl))) 7152 || omp_no_lastprivate (gimplify_omp_ctxp)) 7153 { 7154 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1; 7155 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER; 7156 } 7157 struct gimplify_omp_ctx *outer 7158 = gimplify_omp_ctxp->outer_context; 7159 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)) 7160 { 7161 if (outer->region_type == ORT_WORKSHARE 7162 && outer->combined_loop) 7163 { 7164 n = splay_tree_lookup (outer->variables, 7165 (splay_tree_key)decl); 7166 if (n != NULL && (n->value & GOVD_LOCAL) != 0) 7167 { 7168 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1; 7169 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER; 7170 } 7171 } 7172 } 7173 7174 OMP_CLAUSE_DECL (c) = decl; 7175 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt); 7176 OMP_FOR_CLAUSES (for_stmt) = c; 7177 omp_add_variable (gimplify_omp_ctxp, decl, flags); 7178 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)) 7179 { 7180 if (outer->region_type == ORT_WORKSHARE 7181 && outer->combined_loop) 7182 { 7183 if (outer->outer_context 7184 && (outer->outer_context->region_type 7185 == ORT_COMBINED_PARALLEL)) 7186 outer = outer->outer_context; 7187 else if (omp_check_private (outer, decl, false)) 7188 outer = NULL; 7189 } 7190 else if (outer->region_type != ORT_COMBINED_PARALLEL) 7191 outer = NULL; 7192 if (outer) 7193 { 7194 n = splay_tree_lookup (outer->variables, 7195 (splay_tree_key)decl); 7196 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0) 7197 { 7198 omp_add_variable (outer, decl, 7199 GOVD_LASTPRIVATE | GOVD_SEEN); 7200 if (outer->outer_context) 7201 omp_notice_variable (outer->outer_context, decl, 7202 true); 7203 } 7204 } 7205 } 7206 } 7207 else 7208 { 7209 bool lastprivate 7210 = (!has_decl_expr 7211 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl))) 7212 && !omp_no_lastprivate (gimplify_omp_ctxp); 7213 struct gimplify_omp_ctx *outer 7214 = gimplify_omp_ctxp->outer_context; 7215 if (outer && lastprivate) 7216 { 7217 if (outer->region_type == ORT_WORKSHARE 7218 && outer->combined_loop) 7219 { 7220 n = splay_tree_lookup (outer->variables, 7221 (splay_tree_key)decl); 7222 if (n != NULL && (n->value & GOVD_LOCAL) != 0) 7223 { 7224 lastprivate = false; 7225 outer = NULL; 7226 } 7227 else if (outer->outer_context 7228 && (outer->outer_context->region_type 7229 == ORT_COMBINED_PARALLEL)) 7230 outer = outer->outer_context; 7231 else if (omp_check_private (outer, decl, false)) 7232 outer = NULL; 7233 } 7234 else if (outer->region_type != ORT_COMBINED_PARALLEL) 7235 outer = NULL; 7236 if (outer) 7237 { 7238 n = splay_tree_lookup (outer->variables, 7239 (splay_tree_key)decl); 7240 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0) 7241 { 7242 omp_add_variable (outer, decl, 7243 GOVD_LASTPRIVATE | GOVD_SEEN); 7244 if 
(outer->outer_context) 7245 omp_notice_variable (outer->outer_context, decl, 7246 true); 7247 } 7248 } 7249 } 7250 7251 c = build_omp_clause (input_location, 7252 lastprivate ? OMP_CLAUSE_LASTPRIVATE 7253 : OMP_CLAUSE_PRIVATE); 7254 OMP_CLAUSE_DECL (c) = decl; 7255 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt); 7256 OMP_FOR_CLAUSES (for_stmt) = c; 7257 omp_add_variable (gimplify_omp_ctxp, decl, 7258 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE) 7259 | GOVD_EXPLICIT | GOVD_SEEN); 7260 c = NULL_TREE; 7261 } 7262 } 7263 else if (omp_is_private (gimplify_omp_ctxp, decl, 0)) 7264 omp_notice_variable (gimplify_omp_ctxp, decl, true); 7265 else 7266 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN); 7267 7268 /* If DECL is not a gimple register, create a temporary variable to act 7269 as an iteration counter. This is valid, since DECL cannot be 7270 modified in the body of the loop. Similarly for any iteration vars 7271 in simd with collapse > 1 where the iterator vars must be 7272 lastprivate. */ 7273 if (orig_for_stmt != for_stmt) 7274 var = decl; 7275 else if (!is_gimple_reg (decl) 7276 || (simd && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)) 7277 { 7278 var = create_tmp_var (TREE_TYPE (decl), get_name (decl)); 7279 TREE_OPERAND (t, 0) = var; 7280 7281 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var)); 7282 7283 if (simd && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1) 7284 { 7285 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR); 7286 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1; 7287 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1; 7288 OMP_CLAUSE_DECL (c2) = var; 7289 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt); 7290 OMP_FOR_CLAUSES (for_stmt) = c2; 7291 omp_add_variable (gimplify_omp_ctxp, var, 7292 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN); 7293 if (c == NULL_TREE) 7294 { 7295 c = c2; 7296 c2 = NULL_TREE; 7297 } 7298 } 7299 else 7300 omp_add_variable (gimplify_omp_ctxp, var, 7301 GOVD_PRIVATE | GOVD_SEEN); 7302 } 7303 else 7304 var = decl; 7305 7306 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL, 7307 is_gimple_val, fb_rvalue); 7308 ret = MIN (ret, tret); 7309 if (ret == GS_ERROR) 7310 return ret; 7311 7312 /* Handle OMP_FOR_COND. */ 7313 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i); 7314 gcc_assert (COMPARISON_CLASS_P (t)); 7315 gcc_assert (TREE_OPERAND (t, 0) == decl); 7316 7317 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL, 7318 is_gimple_val, fb_rvalue); 7319 ret = MIN (ret, tret); 7320 7321 /* Handle OMP_FOR_INCR. */ 7322 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i); 7323 switch (TREE_CODE (t)) 7324 { 7325 case PREINCREMENT_EXPR: 7326 case POSTINCREMENT_EXPR: 7327 { 7328 tree decl = TREE_OPERAND (t, 0); 7329 /* c_omp_for_incr_canonicalize_ptr() should have been 7330 called to massage things appropriately. */ 7331 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl))); 7332 7333 if (orig_for_stmt != for_stmt) 7334 break; 7335 t = build_int_cst (TREE_TYPE (decl), 1); 7336 if (c) 7337 OMP_CLAUSE_LINEAR_STEP (c) = t; 7338 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t); 7339 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t); 7340 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t; 7341 break; 7342 } 7343 7344 case PREDECREMENT_EXPR: 7345 case POSTDECREMENT_EXPR: 7346 /* c_omp_for_incr_canonicalize_ptr() should have been 7347 called to massage things appropriately. 
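   (in particular, pointer-typed iteration variables reach this point
   as MODIFY_EXPRs in terms of POINTER_PLUS_EXPR rather than as ++/--
   forms, which is what the assertion below relies on)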
*/ 7348 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl))); 7349 if (orig_for_stmt != for_stmt) 7350 break; 7351 t = build_int_cst (TREE_TYPE (decl), -1); 7352 if (c) 7353 OMP_CLAUSE_LINEAR_STEP (c) = t; 7354 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t); 7355 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t); 7356 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t; 7357 break; 7358 7359 case MODIFY_EXPR: 7360 gcc_assert (TREE_OPERAND (t, 0) == decl); 7361 TREE_OPERAND (t, 0) = var; 7362 7363 t = TREE_OPERAND (t, 1); 7364 switch (TREE_CODE (t)) 7365 { 7366 case PLUS_EXPR: 7367 if (TREE_OPERAND (t, 1) == decl) 7368 { 7369 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0); 7370 TREE_OPERAND (t, 0) = var; 7371 break; 7372 } 7373 7374 /* Fallthru. */ 7375 case MINUS_EXPR: 7376 case POINTER_PLUS_EXPR: 7377 gcc_assert (TREE_OPERAND (t, 0) == decl); 7378 TREE_OPERAND (t, 0) = var; 7379 break; 7380 default: 7381 gcc_unreachable (); 7382 } 7383 7384 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL, 7385 is_gimple_val, fb_rvalue); 7386 ret = MIN (ret, tret); 7387 if (c) 7388 { 7389 tree step = TREE_OPERAND (t, 1); 7390 tree stept = TREE_TYPE (decl); 7391 if (POINTER_TYPE_P (stept)) 7392 stept = sizetype; 7393 step = fold_convert (stept, step); 7394 if (TREE_CODE (t) == MINUS_EXPR) 7395 step = fold_build1 (NEGATE_EXPR, stept, step); 7396 OMP_CLAUSE_LINEAR_STEP (c) = step; 7397 if (step != TREE_OPERAND (t, 1)) 7398 { 7399 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), 7400 &for_pre_body, NULL, 7401 is_gimple_val, fb_rvalue); 7402 ret = MIN (ret, tret); 7403 } 7404 } 7405 break; 7406 7407 default: 7408 gcc_unreachable (); 7409 } 7410 7411 if (c2) 7412 { 7413 gcc_assert (c); 7414 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c); 7415 } 7416 7417 if ((var != decl || TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1) 7418 && orig_for_stmt == for_stmt) 7419 { 7420 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c)) 7421 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE 7422 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL) 7423 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR 7424 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c) 7425 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL)) 7426 && OMP_CLAUSE_DECL (c) == decl) 7427 { 7428 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i); 7429 gcc_assert (TREE_CODE (t) == MODIFY_EXPR); 7430 gcc_assert (TREE_OPERAND (t, 0) == var); 7431 t = TREE_OPERAND (t, 1); 7432 gcc_assert (TREE_CODE (t) == PLUS_EXPR 7433 || TREE_CODE (t) == MINUS_EXPR 7434 || TREE_CODE (t) == POINTER_PLUS_EXPR); 7435 gcc_assert (TREE_OPERAND (t, 0) == var); 7436 t = build2 (TREE_CODE (t), TREE_TYPE (decl), decl, 7437 TREE_OPERAND (t, 1)); 7438 gimple_seq *seq; 7439 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE) 7440 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c); 7441 else 7442 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c); 7443 gimplify_assign (decl, t, seq); 7444 } 7445 } 7446 } 7447 7448 BITMAP_FREE (has_decl_expr); 7449 7450 gimplify_and_add (OMP_FOR_BODY (orig_for_stmt), &for_body); 7451 7452 if (orig_for_stmt != for_stmt) 7453 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++) 7454 { 7455 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i); 7456 decl = TREE_OPERAND (t, 0); 7457 var = create_tmp_var (TREE_TYPE (decl), get_name (decl)); 7458 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN); 7459 TREE_OPERAND (t, 0) = var; 7460 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i); 7461 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1)); 7462 TREE_OPERAND (TREE_OPERAND 
(t, 1), 0) = var; 7463 } 7464 7465 gimplify_adjust_omp_clauses (pre_p, &OMP_FOR_CLAUSES (orig_for_stmt)); 7466 7467 int kind; 7468 switch (TREE_CODE (orig_for_stmt)) 7469 { 7470 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break; 7471 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break; 7472 case CILK_SIMD: kind = GF_OMP_FOR_KIND_CILKSIMD; break; 7473 case CILK_FOR: kind = GF_OMP_FOR_KIND_CILKFOR; break; 7474 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break; 7475 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break; 7476 default: 7477 gcc_unreachable (); 7478 } 7479 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt), 7480 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)), 7481 for_pre_body); 7482 if (orig_for_stmt != for_stmt) 7483 gimple_omp_for_set_combined_p (gfor, true); 7484 if (gimplify_omp_ctxp 7485 && (gimplify_omp_ctxp->combined_loop 7486 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL 7487 && gimplify_omp_ctxp->outer_context 7488 && gimplify_omp_ctxp->outer_context->combined_loop))) 7489 { 7490 gimple_omp_for_set_combined_into_p (gfor, true); 7491 if (gimplify_omp_ctxp->combined_loop) 7492 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD); 7493 else 7494 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR); 7495 } 7496 7497 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++) 7498 { 7499 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i); 7500 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0)); 7501 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1)); 7502 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i); 7503 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t)); 7504 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1)); 7505 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i); 7506 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1)); 7507 } 7508 7509 gimplify_seq_add_stmt (pre_p, gfor); 7510 if (ret != GS_ALL_DONE) 7511 return GS_ERROR; 7512 *expr_p = NULL_TREE; 7513 return GS_ALL_DONE; 7514 } 7515 7516 /* Gimplify the gross structure of several OMP constructs. */ 7517 7518 static void 7519 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p) 7520 { 7521 tree expr = *expr_p; 7522 gimple stmt; 7523 gimple_seq body = NULL; 7524 enum omp_region_type ort; 7525 7526 switch (TREE_CODE (expr)) 7527 { 7528 case OMP_SECTIONS: 7529 case OMP_SINGLE: 7530 ort = ORT_WORKSHARE; 7531 break; 7532 case OACC_KERNELS: 7533 case OACC_PARALLEL: 7534 case OMP_TARGET: 7535 ort = ORT_TARGET; 7536 break; 7537 case OACC_DATA: 7538 case OMP_TARGET_DATA: 7539 ort = ORT_TARGET_DATA; 7540 break; 7541 case OMP_TEAMS: 7542 ort = OMP_TEAMS_COMBINED (expr) ? 
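
/* A rough sketch of what the code above arranges for a simple
   non-collapsed simd loop.  Given

	#pragma omp simd
	for (i = 0; i < n; i++)
	  ...

   the iteration variable receives an implicit linear clause with no
   copy-in, and OMP_FOR_INCR is canonicalized to a MODIFY_EXPR, so the
   construct behaves roughly as if written

	#pragma omp simd linear(i:1)
	for (i = 0; i < n; i = i + 1)
	  ...

   with the step recorded in OMP_CLAUSE_LINEAR_STEP.  */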

/* Gimplify the gross structure of several OMP constructs.  */

static void
gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  gimple stmt;
  gimple_seq body = NULL;
  enum omp_region_type ort;

  switch (TREE_CODE (expr))
    {
    case OMP_SECTIONS:
    case OMP_SINGLE:
      ort = ORT_WORKSHARE;
      break;
    case OACC_KERNELS:
    case OACC_PARALLEL:
    case OMP_TARGET:
      ort = ORT_TARGET;
      break;
    case OACC_DATA:
    case OMP_TARGET_DATA:
      ort = ORT_TARGET_DATA;
      break;
    case OMP_TEAMS:
      ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
      break;
    default:
      gcc_unreachable ();
    }
  gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort);
  if (ort == ORT_TARGET || ort == ORT_TARGET_DATA)
    {
      push_gimplify_context ();
      gimple g = gimplify_and_return_first (OMP_BODY (expr), &body);
      if (gimple_code (g) == GIMPLE_BIND)
	pop_gimplify_context (g);
      else
	pop_gimplify_context (NULL);
      if (ort == ORT_TARGET_DATA)
	{
	  enum built_in_function end_ix;
	  switch (TREE_CODE (expr))
	    {
	    case OACC_DATA:
	      end_ix = BUILT_IN_GOACC_DATA_END;
	      break;
	    case OMP_TARGET_DATA:
	      end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  tree fn = builtin_decl_explicit (end_ix);
	  g = gimple_build_call (fn, 0);
	  gimple_seq cleanup = NULL;
	  gimple_seq_add_stmt (&cleanup, g);
	  g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
	  body = NULL;
	  gimple_seq_add_stmt (&body, g);
	}
    }
  else
    gimplify_and_add (OMP_BODY (expr), &body);
  gimplify_adjust_omp_clauses (pre_p, &OMP_CLAUSES (expr));

  switch (TREE_CODE (expr))
    {
    case OACC_DATA:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
				      OMP_CLAUSES (expr));
      break;
    case OACC_KERNELS:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
				      OMP_CLAUSES (expr));
      break;
    case OACC_PARALLEL:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
				      OMP_CLAUSES (expr));
      break;
    case OMP_SECTIONS:
      stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
      break;
    case OMP_SINGLE:
      stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
      break;
    case OMP_TARGET:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
				      OMP_CLAUSES (expr));
      break;
    case OMP_TARGET_DATA:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
				      OMP_CLAUSES (expr));
      break;
    case OMP_TEAMS:
      stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_seq_add_stmt (pre_p, stmt);
  *expr_p = NULL_TREE;
}

/* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
   target update constructs.  */

static void
gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p, clauses;
  int kind;
  gomp_target *stmt;

  switch (TREE_CODE (expr))
    {
    case OACC_ENTER_DATA:
      clauses = OACC_ENTER_DATA_CLAUSES (expr);
      kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
      break;
    case OACC_EXIT_DATA:
      clauses = OACC_EXIT_DATA_CLAUSES (expr);
      kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
      break;
    case OACC_UPDATE:
      clauses = OACC_UPDATE_CLAUSES (expr);
      kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
      break;
    case OMP_TARGET_UPDATE:
      clauses = OMP_TARGET_UPDATE_CLAUSES (expr);
      kind = GF_OMP_TARGET_KIND_UPDATE;
      break;
    default:
      gcc_unreachable ();
    }
  gimplify_scan_omp_clauses (&clauses, pre_p, ORT_WORKSHARE);
  gimplify_adjust_omp_clauses (pre_p, &clauses);
  stmt = gimple_build_omp_target (NULL, kind, clauses);

  gimplify_seq_add_stmt (pre_p, stmt);
  *expr_p = NULL_TREE;
}
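
/* As a rough example of the structured-block lowering above: for

	#pragma omp target data map(tofrom: x)
	{ ... }

   the body is wrapped in a try/finally so the device data region is
   unmapped on every exit path, schematically

	GIMPLE_OMP_TARGET (kind: data)
	  try
	    { ... }
	  finally
	    GOMP_target_end_data ();

   and OpenACC data constructs get the analogous GOACC_data_end call.  */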
/* A subroutine of gimplify_omp_atomic.  The front end is supposed to have
   stabilized the lhs of the atomic operation as *ADDR.  Return true if
   EXPR is this stabilized form.  */

static bool
goa_lhs_expr_p (tree expr, tree addr)
{
  /* Also include casts to other type variants.  The C front end is fond
     of adding these for e.g. volatile variables.  This is like
     STRIP_TYPE_NOPS but includes the main variant lookup.  */
  STRIP_USELESS_TYPE_CONVERSION (expr);

  if (TREE_CODE (expr) == INDIRECT_REF)
    {
      expr = TREE_OPERAND (expr, 0);
      while (expr != addr
	     && (CONVERT_EXPR_P (expr)
		 || TREE_CODE (expr) == NON_LVALUE_EXPR)
	     && TREE_CODE (expr) == TREE_CODE (addr)
	     && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
	{
	  expr = TREE_OPERAND (expr, 0);
	  addr = TREE_OPERAND (addr, 0);
	}
      if (expr == addr)
	return true;
      return (TREE_CODE (addr) == ADDR_EXPR
	      && TREE_CODE (expr) == ADDR_EXPR
	      && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
    }
  if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
    return true;
  return false;
}

/* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR.  If an
   expression does not involve the lhs, evaluate it into a temporary.
   Return 1 if the lhs appeared as a subexpression, 0 if it did not,
   or -1 if an error was encountered.  */

static int
goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
		    tree lhs_var)
{
  tree expr = *expr_p;
  int saw_lhs;

  if (goa_lhs_expr_p (expr, lhs_addr))
    {
      *expr_p = lhs_var;
      return 1;
    }
  if (is_gimple_val (expr))
    return 0;

  saw_lhs = 0;
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_binary:
    case tcc_comparison:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
				     lhs_var);
    case tcc_unary:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
				     lhs_var);
      break;
    case tcc_expression:
      switch (TREE_CODE (expr))
	{
	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
					 lhs_addr, lhs_var);
	case TRUTH_NOT_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
					 lhs_addr, lhs_var);
	  break;
	case COMPOUND_EXPR:
	  /* Break out any preevaluations from cp_build_modify_expr.  */
	  for (; TREE_CODE (expr) == COMPOUND_EXPR;
	       expr = TREE_OPERAND (expr, 1))
	    gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
	  *expr_p = expr;
	  return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
	default:
	  break;
	}
      break;
    default:
      break;
    }

  if (saw_lhs == 0)
    {
      enum gimplify_status gs;
      gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
      if (gs != GS_ALL_DONE)
	saw_lhs = -1;
    }

  return saw_lhs;
}

/* Gimplify an OMP_ATOMIC statement.  */

static enum gimplify_status
gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
{
  tree addr = TREE_OPERAND (*expr_p, 0);
  tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
	     ? NULL : TREE_OPERAND (*expr_p, 1);
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
  tree tmp_load;
  gomp_atomic_load *loadstmt;
  gomp_atomic_store *storestmt;

  tmp_load = create_tmp_reg (type);
  if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
    return GS_ERROR;

  if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
      != GS_ALL_DONE)
    return GS_ERROR;

  loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
  gimplify_seq_add_stmt (pre_p, loadstmt);
  if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
      != GS_ALL_DONE)
    return GS_ERROR;

  if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
    rhs = tmp_load;
  storestmt = gimple_build_omp_atomic_store (rhs);
  gimplify_seq_add_stmt (pre_p, storestmt);
  if (OMP_ATOMIC_SEQ_CST (*expr_p))
    {
      gimple_omp_atomic_set_seq_cst (loadstmt);
      gimple_omp_atomic_set_seq_cst (storestmt);
    }
  switch (TREE_CODE (*expr_p))
    {
    case OMP_ATOMIC_READ:
    case OMP_ATOMIC_CAPTURE_OLD:
      *expr_p = tmp_load;
      gimple_omp_atomic_set_need_value (loadstmt);
      break;
    case OMP_ATOMIC_CAPTURE_NEW:
      *expr_p = rhs;
      gimple_omp_atomic_set_need_value (storestmt);
      break;
    default:
      *expr_p = NULL;
      break;
    }

  return GS_ALL_DONE;
}
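
/* For instance, the capture forms above lower an atomic update such as

	#pragma omp atomic capture
	v = x += expr;

   into an atomic load/store pair, roughly

	tmp_load = GIMPLE_OMP_ATOMIC_LOAD <&x>
	GIMPLE_OMP_ATOMIC_STORE <tmp_load + expr>

   where for OMP_ATOMIC_CAPTURE_NEW the gimplified rhs (the new value)
   replaces the expression, and for OMP_ATOMIC_CAPTURE_OLD the temporary
   holding the loaded value does.  */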

/* Gimplify a TRANSACTION_EXPR.  This involves gimplification of the
   body, and adding some EH bits.  */

static enum gimplify_status
gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
  gimple body_stmt;
  gtransaction *trans_stmt;
  gimple_seq body = NULL;
  int subcode = 0;

  /* Wrap the transaction body in a BIND_EXPR so we have a context
     where to put decls for OMP.  */
  if (TREE_CODE (tbody) != BIND_EXPR)
    {
      tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
      TREE_SIDE_EFFECTS (bind) = 1;
      SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
      TRANSACTION_EXPR_BODY (expr) = bind;
    }

  push_gimplify_context ();
  temp = voidify_wrapper_expr (*expr_p, NULL);

  body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
  pop_gimplify_context (body_stmt);

  trans_stmt = gimple_build_transaction (body, NULL);
  if (TRANSACTION_EXPR_OUTER (expr))
    subcode = GTMA_IS_OUTER;
  else if (TRANSACTION_EXPR_RELAXED (expr))
    subcode = GTMA_IS_RELAXED;
  gimple_transaction_set_subcode (trans_stmt, subcode);

  gimplify_seq_add_stmt (pre_p, trans_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}

/* Convert the GENERIC expression tree *EXPR_P to GIMPLE.  If the
   expression produces a value to be used as an operand inside a GIMPLE
   statement, the value will be stored back in *EXPR_P.  This value will
   be a tree of class tcc_declaration, tcc_constant, tcc_reference or
   an SSA_NAME.  The corresponding sequence of GIMPLE statements is
   emitted in PRE_P and POST_P.

   Additionally, this process may overwrite parts of the input
   expression during gimplification.  Ideally, it should be
   possible to do non-destructive gimplification.

   EXPR_P points to the GENERIC expression to convert to GIMPLE.  If
   the expression needs to evaluate to a value to be used as
   an operand in a GIMPLE statement, this value will be stored in
   *EXPR_P on exit.  This happens when the caller specifies one
   of fb_lvalue or fb_rvalue fallback flags.

   PRE_P will contain the sequence of GIMPLE statements corresponding
   to the evaluation of EXPR and all the side-effects that must
   be executed before the main expression.  On exit, the last
   statement of PRE_P is the core statement being gimplified.  For
   instance, when gimplifying 'if (++a)' the last statement in
   PRE_P will be 'if (t.1)' where t.1 is the result of
   pre-incrementing 'a'.

   POST_P will contain the sequence of GIMPLE statements corresponding
   to the evaluation of all the side-effects that must be executed
   after the main expression.  If this is NULL, the post
   side-effects are stored at the end of PRE_P.

   The reason why the output is split in two is to handle post
   side-effects explicitly.  In some cases, an expression may have
   inner and outer post side-effects which need to be emitted in
   an order different from the one given by the recursive
   traversal.  For instance, for the expression (*p--)++ the post
   side-effects of '--' must actually occur *after* the post
   side-effects of '++'.  However, gimplification will first visit
   the inner expression, so if a separate POST sequence was not
   used, the resulting sequence would be:

	1	t.1 = *p
	2	p = p - 1
	3	t.2 = t.1 + 1
	4	*p = t.2

   However, the post-decrement operation in line #2 must not be
   evaluated until after the store to *p at line #4, so the
   correct sequence should be:

	1	t.1 = *p
	2	t.2 = t.1 + 1
	3	*p = t.2
	4	p = p - 1

   So, by specifying a separate post queue, it is possible
   to emit the post side-effects in the correct order.  If POST_P
   is NULL, an internal queue will be used.  Before returning to
   the caller, the sequence POST_P is appended to the main output
   sequence PRE_P.

   GIMPLE_TEST_F points to a function that takes a tree T and
   returns nonzero if T is in the GIMPLE form requested by the
   caller.  The GIMPLE predicates are in gimple.c.

   FALLBACK tells the function what sort of a temporary we want if
   gimplification cannot produce an expression that complies with
   GIMPLE_TEST_F.

   fb_none means that no temporary should be generated
   fb_rvalue means that an rvalue is OK to generate
   fb_lvalue means that an lvalue is OK to generate
   fb_either means that either is OK, but an lvalue is preferable.
   fb_mayfail means that gimplification may fail (in which case
   GS_ERROR will be returned)

   The return value is either GS_ERROR or GS_ALL_DONE, since this
   function iterates until EXPR is completely gimplified or an error
   occurs.  */

enum gimplify_status
gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
	       bool (*gimple_test_f) (tree), fallback_t fallback)
{
  tree tmp;
  gimple_seq internal_pre = NULL;
  gimple_seq internal_post = NULL;
  tree save_expr;
  bool is_statement;
  location_t saved_location;
  enum gimplify_status ret;
  gimple_stmt_iterator pre_last_gsi, post_last_gsi;

  save_expr = *expr_p;
  if (save_expr == NULL_TREE)
    return GS_ALL_DONE;

  /* If we are gimplifying a top-level statement, PRE_P must be valid.  */
  is_statement = gimple_test_f == is_gimple_stmt;
  if (is_statement)
    gcc_assert (pre_p);

  /* Consistency checks.  */
  if (gimple_test_f == is_gimple_reg)
    gcc_assert (fallback & (fb_rvalue | fb_lvalue));
  else if (gimple_test_f == is_gimple_val
	   || gimple_test_f == is_gimple_call_addr
	   || gimple_test_f == is_gimple_condexpr
	   || gimple_test_f == is_gimple_mem_rhs
	   || gimple_test_f == is_gimple_mem_rhs_or_call
	   || gimple_test_f == is_gimple_reg_rhs
	   || gimple_test_f == is_gimple_reg_rhs_or_call
	   || gimple_test_f == is_gimple_asm_val
	   || gimple_test_f == is_gimple_mem_ref_addr)
    gcc_assert (fallback & fb_rvalue);
  else if (gimple_test_f == is_gimple_min_lval
	   || gimple_test_f == is_gimple_lvalue)
    gcc_assert (fallback & fb_lvalue);
  else if (gimple_test_f == is_gimple_addressable)
    gcc_assert (fallback & fb_either);
  else if (gimple_test_f == is_gimple_stmt)
    gcc_assert (fallback == fb_none);
  else
    {
      /* We should have recognized the GIMPLE_TEST_F predicate to
	 know what kind of fallback to use in case a temporary is
	 needed to hold the value or address of *EXPR_P.  */
      gcc_unreachable ();
    }

  /* We used to check the predicate here and return immediately if it
     succeeds.  This is wrong; the design is for gimplification to be
     idempotent, and for the predicates to only test for valid forms, not
     whether they are fully simplified.  */
  if (pre_p == NULL)
    pre_p = &internal_pre;

  if (post_p == NULL)
    post_p = &internal_post;

  /* Remember the last statements added to PRE_P and POST_P.  Every
     new statement added by the gimplification helpers needs to be
     annotated with location information.  To centralize the
     responsibility, we remember the last statement that had been
     added to both queues before gimplifying *EXPR_P.  If
     gimplification produces new statements in PRE_P and POST_P, those
     statements will be annotated with the same location information
     as *EXPR_P.  */
  pre_last_gsi = gsi_last (*pre_p);
  post_last_gsi = gsi_last (*post_p);

  saved_location = input_location;
  if (save_expr != error_mark_node
      && EXPR_HAS_LOCATION (*expr_p))
    input_location = EXPR_LOCATION (*expr_p);

  /* Loop over the specific gimplifiers until the toplevel node
     remains the same.  */
  do
    {
      /* Strip away as many useless type conversions as possible
	 at the toplevel.  */
      STRIP_USELESS_TYPE_CONVERSION (*expr_p);

      /* Remember the expr.  */
      save_expr = *expr_p;

      /* Die, die, die, my darling.  */
      if (save_expr == error_mark_node
	  || (TREE_TYPE (save_expr)
	      && TREE_TYPE (save_expr) == error_mark_node))
	{
	  ret = GS_ERROR;
	  break;
	}

      /* Do any language-specific gimplification.  */
      ret = ((enum gimplify_status)
	     lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
      if (ret == GS_OK)
	{
	  if (*expr_p == NULL_TREE)
	    break;
	  if (*expr_p != save_expr)
	    continue;
	}
      else if (ret != GS_UNHANDLED)
	break;

      /* Make sure that all the cases set 'ret' appropriately.  */
      ret = GS_UNHANDLED;
      switch (TREE_CODE (*expr_p))
	{
	  /* First deal with the special cases.  */

	case POSTINCREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	case PREINCREMENT_EXPR:
	case PREDECREMENT_EXPR:
	  ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
					fallback != fb_none,
					TREE_TYPE (*expr_p));
	  break;

	case VIEW_CONVERT_EXPR:
	  if (is_gimple_reg_type (TREE_TYPE (*expr_p))
	      && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
	    {
	      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
				   post_p, is_gimple_val, fb_rvalue);
	      recalculate_side_effects (*expr_p);
	      break;
	    }
	  /* Fallthru.  */

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	case REALPART_EXPR:
	case IMAGPART_EXPR:
	case COMPONENT_REF:
	  ret = gimplify_compound_lval (expr_p, pre_p, post_p,
					fallback ? fallback : fb_rvalue);
	  break;

	case COND_EXPR:
	  ret = gimplify_cond_expr (expr_p, pre_p, fallback);

	  /* C99 code may assign to an array in a structure value of a
	     conditional expression, and this has undefined behavior
	     only on execution, so create a temporary if an lvalue is
	     required.  */
	  if (fallback == fb_lvalue)
	    {
	      *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
	      mark_addressable (*expr_p);
	      ret = GS_OK;
	    }
	  break;

	case CALL_EXPR:
	  ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);

	  /* C99 code may assign to an array in a structure returned
	     from a function, and this has undefined behavior only on
	     execution, so create a temporary if an lvalue is
	     required.  */
	  if (fallback == fb_lvalue)
	    {
	      *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
	      mark_addressable (*expr_p);
	      ret = GS_OK;
	    }
	  break;

	case TREE_LIST:
	  gcc_unreachable ();

	case COMPOUND_EXPR:
	  ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
	  break;

	case COMPOUND_LITERAL_EXPR:
	  ret = gimplify_compound_literal_expr (expr_p, pre_p,
						gimple_test_f, fallback);
	  break;

	case MODIFY_EXPR:
	case INIT_EXPR:
	  ret = gimplify_modify_expr (expr_p, pre_p, post_p,
				      fallback != fb_none);
	  break;

	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	  {
	    /* Preserve the original type of the expression and the
	       source location of the outer expression.  */
	    tree org_type = TREE_TYPE (*expr_p);
	    *expr_p = gimple_boolify (*expr_p);
	    *expr_p = build3_loc (input_location, COND_EXPR,
				  org_type, *expr_p,
				  fold_convert_loc
				    (input_location,
				     org_type, boolean_true_node),
				  fold_convert_loc
				    (input_location,
				     org_type, boolean_false_node));
	    ret = GS_OK;
	    break;
	  }
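
	  /* So, for instance, an int-valued 'a && b' is boolified and
	     rewritten here roughly as

		a && b  ?  (int) 1  :  (int) 0

	     i.e. a COND_EXPR of the original type; gimplify_cond_expr
	     then takes care of the short-circuit control flow on the
	     next iteration of the loop above.  */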
	case TRUTH_NOT_EXPR:
	  {
	    tree type = TREE_TYPE (*expr_p);
	    /* The parsers are careful to generate TRUTH_NOT_EXPR
	       only with operands that are always zero or one.
	       We do not fold here but handle the only interesting case
	       manually, as fold may re-introduce the TRUTH_NOT_EXPR.  */
	    *expr_p = gimple_boolify (*expr_p);
	    if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
	      *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
				    TREE_TYPE (*expr_p),
				    TREE_OPERAND (*expr_p, 0));
	    else
	      *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
				    TREE_TYPE (*expr_p),
				    TREE_OPERAND (*expr_p, 0),
				    build_int_cst (TREE_TYPE (*expr_p), 1));
	    if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
	      *expr_p = fold_convert_loc (input_location, type, *expr_p);
	    ret = GS_OK;
	    break;
	  }

	case ADDR_EXPR:
	  ret = gimplify_addr_expr (expr_p, pre_p, post_p);
	  break;

	case ANNOTATE_EXPR:
	  {
	    tree cond = TREE_OPERAND (*expr_p, 0);
	    tree kind = TREE_OPERAND (*expr_p, 1);
	    tree type = TREE_TYPE (cond);
	    if (!INTEGRAL_TYPE_P (type))
	      {
		*expr_p = cond;
		ret = GS_OK;
		break;
	      }
	    tree tmp = create_tmp_var (type);
	    gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
	    gcall *call
	      = gimple_build_call_internal (IFN_ANNOTATE, 2, cond, kind);
	    gimple_call_set_lhs (call, tmp);
	    gimplify_seq_add_stmt (pre_p, call);
	    *expr_p = tmp;
	    ret = GS_ALL_DONE;
	    break;
	  }

	case VA_ARG_EXPR:
	  ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
	  break;

	CASE_CONVERT:
	  if (IS_EMPTY_STMT (*expr_p))
	    {
	      ret = GS_ALL_DONE;
	      break;
	    }

	  if (VOID_TYPE_P (TREE_TYPE (*expr_p))
	      || fallback == fb_none)
	    {
	      /* Just strip a conversion to void (or in void context) and
		 try again.  */
	      *expr_p = TREE_OPERAND (*expr_p, 0);
	      ret = GS_OK;
	      break;
	    }

	  ret = gimplify_conversion (expr_p);
	  if (ret == GS_ERROR)
	    break;
	  if (*expr_p != save_expr)
	    break;
	  /* FALLTHRU */

	case FIX_TRUNC_EXPR:
	  /* unary_expr: ... | '(' cast ')' val | ...  */
	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			       is_gimple_val, fb_rvalue);
	  recalculate_side_effects (*expr_p);
	  break;

	case INDIRECT_REF:
	  {
	    bool volatilep = TREE_THIS_VOLATILE (*expr_p);
	    bool notrap = TREE_THIS_NOTRAP (*expr_p);
	    tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));

	    *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
	    if (*expr_p != save_expr)
	      {
		ret = GS_OK;
		break;
	      }

	    ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
				 is_gimple_reg, fb_rvalue);
	    if (ret == GS_ERROR)
	      break;

	    recalculate_side_effects (*expr_p);
	    *expr_p = fold_build2_loc (input_location, MEM_REF,
				       TREE_TYPE (*expr_p),
				       TREE_OPERAND (*expr_p, 0),
				       build_int_cst (saved_ptr_type, 0));
	    TREE_THIS_VOLATILE (*expr_p) = volatilep;
	    TREE_THIS_NOTRAP (*expr_p) = notrap;
	    ret = GS_OK;
	    break;
	  }
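
	  /* E.g. a dereference '*p' that fold_indirect_ref_loc cannot
	     simplify away is rewritten above into the equivalent

		MEM_REF <p, 0>

	     with the pointer operand gimplified to a register and the
	     original volatility and trap bits preserved.  */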
	  /* We arrive here through the various re-gimplification paths.  */
	case MEM_REF:
	  /* First try re-folding the whole thing.  */
	  tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
			     TREE_OPERAND (*expr_p, 0),
			     TREE_OPERAND (*expr_p, 1));
	  if (tmp)
	    {
	      *expr_p = tmp;
	      recalculate_side_effects (*expr_p);
	      ret = GS_OK;
	      break;
	    }
	  /* Avoid re-gimplifying the address operand if it is already
	     in suitable form.  Re-gimplifying would mark the address
	     operand addressable.  Always gimplify when not in SSA form
	     as we still may have to gimplify decls with value-exprs.  */
	  if (!gimplify_ctxp || !gimplify_ctxp->into_ssa
	      || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
	    {
	      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
				   is_gimple_mem_ref_addr, fb_rvalue);
	      if (ret == GS_ERROR)
		break;
	    }
	  recalculate_side_effects (*expr_p);
	  ret = GS_ALL_DONE;
	  break;

	  /* Constants need not be gimplified.  */
	case INTEGER_CST:
	case REAL_CST:
	case FIXED_CST:
	case STRING_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	  /* Drop the overflow flag on constants, we do not want
	     that in the GIMPLE IL.  */
	  if (TREE_OVERFLOW_P (*expr_p))
	    *expr_p = drop_tree_overflow (*expr_p);
	  ret = GS_ALL_DONE;
	  break;

	case CONST_DECL:
	  /* If we require an lvalue, such as for ADDR_EXPR, retain the
	     CONST_DECL node.  Otherwise the decl is replaceable by its
	     value.  */
	  /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either.  */
	  if (fallback & fb_lvalue)
	    ret = GS_ALL_DONE;
	  else
	    {
	      *expr_p = DECL_INITIAL (*expr_p);
	      ret = GS_OK;
	    }
	  break;

	case DECL_EXPR:
	  ret = gimplify_decl_expr (expr_p, pre_p);
	  break;

	case BIND_EXPR:
	  ret = gimplify_bind_expr (expr_p, pre_p);
	  break;

	case LOOP_EXPR:
	  ret = gimplify_loop_expr (expr_p, pre_p);
	  break;

	case SWITCH_EXPR:
	  ret = gimplify_switch_expr (expr_p, pre_p);
	  break;

	case EXIT_EXPR:
	  ret = gimplify_exit_expr (expr_p);
	  break;

	case GOTO_EXPR:
	  /* If the target is not a LABEL_DECL, then it is a computed jump
	     and the target needs to be gimplified.  */
	  if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
	    {
	      ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
				   NULL, is_gimple_val, fb_rvalue);
	      if (ret == GS_ERROR)
		break;
	    }
	  gimplify_seq_add_stmt (pre_p,
				 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
	  ret = GS_ALL_DONE;
	  break;

	case PREDICT_EXPR:
	  gimplify_seq_add_stmt (pre_p,
				 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
						       PREDICT_EXPR_OUTCOME (*expr_p)));
	  ret = GS_ALL_DONE;
	  break;

	case LABEL_EXPR:
	  ret = GS_ALL_DONE;
	  gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
		      == current_function_decl);
	  gimplify_seq_add_stmt (pre_p,
				 gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
	  break;

	case CASE_LABEL_EXPR:
	  ret = gimplify_case_label_expr (expr_p, pre_p);
	  break;

	case RETURN_EXPR:
	  ret = gimplify_return_expr (*expr_p, pre_p);
	  break;

	case CONSTRUCTOR:
	  /* Don't reduce this in place; let gimplify_init_constructor work its
	     magic.  But if we're just elaborating this for side effects, just
	     gimplify any element that has side-effects.  */
	  if (fallback == fb_none)
	    {
	      unsigned HOST_WIDE_INT ix;
	      tree val;
	      tree temp = NULL_TREE;
	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
		if (TREE_SIDE_EFFECTS (val))
		  append_to_statement_list (val, &temp);

	      *expr_p = temp;
	      ret = temp ? GS_OK : GS_ALL_DONE;
	    }
	  /* C99 code may assign to an array in a constructed
	     structure or union, and this has undefined behavior only
	     on execution, so create a temporary if an lvalue is
	     required.  */
	  else if (fallback == fb_lvalue)
	    {
	      *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
	      mark_addressable (*expr_p);
	      ret = GS_OK;
	    }
	  else
	    ret = GS_ALL_DONE;
	  break;

	  /* The following are special cases that are not handled by the
	     original GIMPLE grammar.  */

	  /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
	     eliminated.  */
	case SAVE_EXPR:
	  ret = gimplify_save_expr (expr_p, pre_p, post_p);
	  break;

	case BIT_FIELD_REF:
	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
			       post_p, is_gimple_lvalue, fb_either);
	  recalculate_side_effects (*expr_p);
	  break;

	case TARGET_MEM_REF:
	  {
	    enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;

	    if (TMR_BASE (*expr_p))
	      r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
				  post_p, is_gimple_mem_ref_addr, fb_either);
	    if (TMR_INDEX (*expr_p))
	      r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
				  post_p, is_gimple_val, fb_rvalue);
	    if (TMR_INDEX2 (*expr_p))
	      r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
				  post_p, is_gimple_val, fb_rvalue);
	    /* TMR_STEP and TMR_OFFSET are always integer constants.  */
	    ret = MIN (r0, r1);
	  }
	  break;

	case NON_LVALUE_EXPR:
	  /* This should have been stripped above.  */
	  gcc_unreachable ();

	case ASM_EXPR:
	  ret = gimplify_asm_expr (expr_p, pre_p, post_p);
	  break;
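
	  /* The TRY_FINALLY_EXPR / TRY_CATCH_EXPR handling below turns,
	     e.g., the cleanup region a C++ front end emits for a local
	     with a destructor

		{ struct S s;  ... }

	     into a GIMPLE_TRY, schematically

		try
		  { ... }
		finally
		  { S::~S (&s); }

	     unless the cleanup sequence turns out to be empty, in which
	     case only the evaluation sequence is emitted.  */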
	case TRY_FINALLY_EXPR:
	case TRY_CATCH_EXPR:
	  {
	    gimple_seq eval, cleanup;
	    gtry *try_;

	    /* Calls to destructors are generated automatically in FINALLY/CATCH
	       block.  They should have location as UNKNOWN_LOCATION.  However,
	       gimplify_call_expr will reset these call stmts to input_location
	       if it finds stmt's location is unknown.  To prevent resetting for
	       destructors, we set the input_location to unknown.
	       Note that this only affects the destructor calls in FINALLY/CATCH
	       block, and will automatically reset to its original value by the
	       end of gimplify_expr.  */
	    input_location = UNKNOWN_LOCATION;
	    eval = cleanup = NULL;
	    gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
	    gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
	    /* Don't create bogus GIMPLE_TRY with empty cleanup.  */
	    if (gimple_seq_empty_p (cleanup))
	      {
		gimple_seq_add_seq (pre_p, eval);
		ret = GS_ALL_DONE;
		break;
	      }
	    try_ = gimple_build_try (eval, cleanup,
				     TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
				     ? GIMPLE_TRY_FINALLY
				     : GIMPLE_TRY_CATCH);
	    if (EXPR_HAS_LOCATION (save_expr))
	      gimple_set_location (try_, EXPR_LOCATION (save_expr));
	    else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
	      gimple_set_location (try_, saved_location);
	    if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
	      gimple_try_set_catch_is_cleanup (try_,
					       TRY_CATCH_IS_CLEANUP (*expr_p));
	    gimplify_seq_add_stmt (pre_p, try_);
	    ret = GS_ALL_DONE;
	    break;
	  }

	case CLEANUP_POINT_EXPR:
	  ret = gimplify_cleanup_point_expr (expr_p, pre_p);
	  break;

	case TARGET_EXPR:
	  ret = gimplify_target_expr (expr_p, pre_p, post_p);
	  break;

	case CATCH_EXPR:
	  {
	    gimple c;
	    gimple_seq handler = NULL;
	    gimplify_and_add (CATCH_BODY (*expr_p), &handler);
	    c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
	    gimplify_seq_add_stmt (pre_p, c);
	    ret = GS_ALL_DONE;
	    break;
	  }

	case EH_FILTER_EXPR:
	  {
	    gimple ehf;
	    gimple_seq failure = NULL;

	    gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
	    ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
	    gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
	    gimplify_seq_add_stmt (pre_p, ehf);
	    ret = GS_ALL_DONE;
	    break;
	  }

	case OBJ_TYPE_REF:
	  {
	    enum gimplify_status r0, r1;
	    r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
				post_p, is_gimple_val, fb_rvalue);
	    r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
				post_p, is_gimple_val, fb_rvalue);
	    TREE_SIDE_EFFECTS (*expr_p) = 0;
	    ret = MIN (r0, r1);
	  }
	  break;

	case LABEL_DECL:
	  /* We get here when taking the address of a label.  We mark
	     the label as "forced", meaning it can never be removed and
	     it is a potential target for any computed goto.  */
	  FORCED_LABEL (*expr_p) = 1;
	  ret = GS_ALL_DONE;
	  break;

	case STATEMENT_LIST:
	  ret = gimplify_statement_list (expr_p, pre_p);
	  break;

	case WITH_SIZE_EXPR:
	  {
	    gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
			   post_p == &internal_post ? NULL : post_p,
			   gimple_test_f, fallback);
	    gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
			   is_gimple_val, fb_rvalue);
	    ret = GS_ALL_DONE;
	  }
	  break;

	case VAR_DECL:
	case PARM_DECL:
	  ret = gimplify_var_or_parm_decl (expr_p);
	  break;

	case RESULT_DECL:
	  /* When within an OMP context, notice uses of variables.  */
	  if (gimplify_omp_ctxp)
	    omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
	  ret = GS_ALL_DONE;
	  break;

	case SSA_NAME:
	  /* Allow callbacks into the gimplifier during optimization.  */
	  ret = GS_ALL_DONE;
	  break;

	case OMP_PARALLEL:
	  gimplify_omp_parallel (expr_p, pre_p);
	  ret = GS_ALL_DONE;
	  break;

	case OMP_TASK:
	  gimplify_omp_task (expr_p, pre_p);
	  ret = GS_ALL_DONE;
	  break;

	case OMP_FOR:
	case OMP_SIMD:
	case CILK_SIMD:
	case CILK_FOR:
	case OMP_DISTRIBUTE:
	case OACC_LOOP:
	  ret = gimplify_omp_for (expr_p, pre_p);
	  break;

	case OACC_CACHE:
	  gimplify_oacc_cache (expr_p, pre_p);
	  ret = GS_ALL_DONE;
	  break;

	case OACC_HOST_DATA:
	case OACC_DECLARE:
	  sorry ("directive not yet implemented");
	  ret = GS_ALL_DONE;
	  break;

	case OACC_KERNELS:
	  if (OACC_KERNELS_COMBINED (*expr_p))
	    sorry ("directive not yet implemented");
	  else
	    gimplify_omp_workshare (expr_p, pre_p);
	  ret = GS_ALL_DONE;
	  break;

	case OACC_PARALLEL:
	  if (OACC_PARALLEL_COMBINED (*expr_p))
	    sorry ("directive not yet implemented");
	  else
	    gimplify_omp_workshare (expr_p, pre_p);
	  ret = GS_ALL_DONE;
	  break;

	case OACC_DATA:
	case OMP_SECTIONS:
	case OMP_SINGLE:
	case OMP_TARGET:
	case OMP_TARGET_DATA:
	case OMP_TEAMS:
	  gimplify_omp_workshare (expr_p, pre_p);
	  ret = GS_ALL_DONE;
	  break;

	case OACC_ENTER_DATA:
	case OACC_EXIT_DATA:
	case OACC_UPDATE:
	case OMP_TARGET_UPDATE:
	  gimplify_omp_target_update (expr_p, pre_p);
	  ret = GS_ALL_DONE;
	  break;

	case OMP_SECTION:
	case OMP_MASTER:
	case OMP_TASKGROUP:
	case OMP_ORDERED:
	case OMP_CRITICAL:
	  {
	    gimple_seq body = NULL;
	    gimple g;

	    gimplify_and_add (OMP_BODY (*expr_p), &body);
	    switch (TREE_CODE (*expr_p))
	      {
	      case OMP_SECTION:
		g = gimple_build_omp_section (body);
		break;
	      case OMP_MASTER:
		g = gimple_build_omp_master (body);
		break;
	      case OMP_TASKGROUP:
		{
		  gimple_seq cleanup = NULL;
		  tree fn
		    = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
		  g = gimple_build_call (fn, 0);
		  gimple_seq_add_stmt (&cleanup, g);
		  g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
		  body = NULL;
		  gimple_seq_add_stmt (&body, g);
		  g = gimple_build_omp_taskgroup (body);
		}
		break;
	      case OMP_ORDERED:
		g = gimple_build_omp_ordered (body);
		break;
	      case OMP_CRITICAL:
		g = gimple_build_omp_critical (body,
					       OMP_CRITICAL_NAME (*expr_p));
		break;
	      default:
		gcc_unreachable ();
	      }
	    gimplify_seq_add_stmt (pre_p, g);
	    ret = GS_ALL_DONE;
	    break;
	  }
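
	  /* As an example of the above, '#pragma omp taskgroup' wraps its
	     body so the end of the group is signalled on every exit path,
	     roughly

		GIMPLE_OMP_TASKGROUP
		  try
		    { ... }
		  finally
		    GOMP_taskgroup_end ();

	     mirroring the target-data cleanup idiom used earlier.  */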
	case OMP_ATOMIC:
	case OMP_ATOMIC_READ:
	case OMP_ATOMIC_CAPTURE_OLD:
	case OMP_ATOMIC_CAPTURE_NEW:
	  ret = gimplify_omp_atomic (expr_p, pre_p);
	  break;

	case TRANSACTION_EXPR:
	  ret = gimplify_transaction (expr_p, pre_p);
	  break;

	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  {
	    tree orig_type = TREE_TYPE (*expr_p);
	    tree new_type, xop0, xop1;
	    *expr_p = gimple_boolify (*expr_p);
	    new_type = TREE_TYPE (*expr_p);
	    if (!useless_type_conversion_p (orig_type, new_type))
	      {
		*expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
		ret = GS_OK;
		break;
	      }

	    /* Boolified binary truth expressions are semantically equivalent
	       to bitwise binary expressions.  Canonicalize them to the
	       bitwise variant.  */
	    switch (TREE_CODE (*expr_p))
	      {
	      case TRUTH_AND_EXPR:
		TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
		break;
	      case TRUTH_OR_EXPR:
		TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
		break;
	      case TRUTH_XOR_EXPR:
		TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
		break;
	      default:
		break;
	      }
	    /* Now make sure that operands have compatible type to
	       expression's new_type.  */
	    xop0 = TREE_OPERAND (*expr_p, 0);
	    xop1 = TREE_OPERAND (*expr_p, 1);
	    if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
	      TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
							    new_type,
							    xop0);
	    if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
	      TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
							    new_type,
							    xop1);
	    /* Continue classified as tcc_binary.  */
	    goto expr_2;
	  }

	case FMA_EXPR:
	case VEC_COND_EXPR:
	case VEC_PERM_EXPR:
	  /* Classified as tcc_expression.  */
	  goto expr_3;

	case POINTER_PLUS_EXPR:
	  {
	    enum gimplify_status r0, r1;
	    r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
				post_p, is_gimple_val, fb_rvalue);
	    r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
				post_p, is_gimple_val, fb_rvalue);
	    recalculate_side_effects (*expr_p);
	    ret = MIN (r0, r1);
	    break;
	  }

	case CILK_SYNC_STMT:
	  {
	    if (!fn_contains_cilk_spawn_p (cfun))
	      {
		error_at (EXPR_LOCATION (*expr_p),
			  "expected %<_Cilk_spawn%> before %<_Cilk_sync%>");
		ret = GS_ERROR;
	      }
	    else
	      {
		gimplify_cilk_sync (expr_p, pre_p);
		ret = GS_ALL_DONE;
	      }
	    break;
	  }

	default:
	  switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
	    {
	    case tcc_comparison:
	      /* Handle comparison of objects of non-scalar mode aggregates
		 with a call to memcmp.  It would be nice to only have to do
		 this for variable-sized objects, but then we'd have to allow
		 the same nest of reference nodes we allow for MODIFY_EXPR and
		 that's too complex.

		 Compare scalar mode aggregates as scalar mode values.  Using
		 memcmp for them would be very inefficient at best, and is
		 plain wrong if bitfields are involved.  */
	      {
		tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));

		/* Vector comparisons need no boolification.  */
		if (TREE_CODE (type) == VECTOR_TYPE)
		  goto expr_2;
		else if (!AGGREGATE_TYPE_P (type))
		  {
		    tree org_type = TREE_TYPE (*expr_p);
		    *expr_p = gimple_boolify (*expr_p);
		    if (!useless_type_conversion_p (org_type,
						    TREE_TYPE (*expr_p)))
		      {
			*expr_p = fold_convert_loc (input_location,
						    org_type, *expr_p);
			ret = GS_OK;
		      }
		    else
		      goto expr_2;
		  }
		else if (TYPE_MODE (type) != BLKmode)
		  ret = gimplify_scalar_mode_aggregate_compare (expr_p);
		else
		  ret = gimplify_variable_sized_compare (expr_p);

		break;
	      }
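
	      /* For instance, with the handling above an aggregate
		 equality test such as some front ends emit,

		    s1 == s2

		 is, for a BLKmode type, rewritten by
		 gimplify_variable_sized_compare into roughly

		    __builtin_memcmp (&s1, &s2, sizeof (s1)) == 0

		 while an aggregate small enough to live in a scalar mode
		 is compared as an integer of that mode instead.  */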
	      /* If *EXPR_P does not need to be special-cased, handle it
		 according to its class.  */
	    case tcc_unary:
	      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
				   post_p, is_gimple_val, fb_rvalue);
	      break;

	    case tcc_binary:
	    expr_2:
	      {
		enum gimplify_status r0, r1;

		r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
				    post_p, is_gimple_val, fb_rvalue);
		r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
				    post_p, is_gimple_val, fb_rvalue);

		ret = MIN (r0, r1);
		break;
	      }

	    expr_3:
	      {
		enum gimplify_status r0, r1, r2;

		r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
				    post_p, is_gimple_val, fb_rvalue);
		r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
				    post_p, is_gimple_val, fb_rvalue);
		r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
				    post_p, is_gimple_val, fb_rvalue);

		ret = MIN (MIN (r0, r1), r2);
		break;
	      }

	    case tcc_declaration:
	    case tcc_constant:
	      ret = GS_ALL_DONE;
	      goto dont_recalculate;

	    default:
	      gcc_unreachable ();
	    }

	  recalculate_side_effects (*expr_p);

	dont_recalculate:
	  break;
	}

      gcc_assert (*expr_p || ret != GS_OK);
    }
  while (ret == GS_OK);

  /* If we encountered an error_mark somewhere nested inside, either
     stub out the statement or propagate the error back out.  */
  if (ret == GS_ERROR)
    {
      if (is_statement)
	*expr_p = NULL;
      goto out;
    }

  /* This was only valid as a return value from the langhook, which
     we handled.  Make sure it doesn't escape from any other context.  */
  gcc_assert (ret != GS_UNHANDLED);

  if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
    {
      /* We aren't looking for a value, and we don't have a valid
	 statement.  If it doesn't have side-effects, throw it away.  */
      if (!TREE_SIDE_EFFECTS (*expr_p))
	*expr_p = NULL;
      else if (!TREE_THIS_VOLATILE (*expr_p))
	{
	  /* This is probably a _REF that contains something nested that
	     has side effects.  Recurse through the operands to find it.  */
	  enum tree_code code = TREE_CODE (*expr_p);

	  switch (code)
	    {
	    case COMPONENT_REF:
	    case REALPART_EXPR:
	    case IMAGPART_EXPR:
	    case VIEW_CONVERT_EXPR:
	      gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			     gimple_test_f, fallback);
	      break;

	    case ARRAY_REF:
	    case ARRAY_RANGE_REF:
	      gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			     gimple_test_f, fallback);
	      gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
			     gimple_test_f, fallback);
	      break;

	    default:
	      /* Anything else with side-effects must be converted to
		 a valid statement before we get here.  */
	      gcc_unreachable ();
	    }

	  *expr_p = NULL;
	}
      else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
	       && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
	{
	  /* Historically, the compiler has treated a bare reference
	     to a non-BLKmode volatile lvalue as forcing a load.  */
	  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));

	  /* Normally, we do not want to create a temporary for a
	     TREE_ADDRESSABLE type because such a type should not be
	     copied by bitwise-assignment.  However, we make an
	     exception here, as all we are doing here is ensuring that
	     we read the bytes that make up the type.  We use
	     create_tmp_var_raw because create_tmp_var will abort when
	     given a TREE_ADDRESSABLE type.  */
	  tree tmp = create_tmp_var_raw (type, "vol");
	  gimple_add_tmp_var (tmp);
	  gimplify_assign (tmp, *expr_p, pre_p);
	  *expr_p = NULL;
	}
      else
	/* We can't do anything useful with a volatile reference to
	   an incomplete type, so just throw it away.  Likewise for
	   a BLKmode type, since any implicit inner load should
	   already have been turned into an explicit one by the
	   gimplification process.  */
	*expr_p = NULL;
    }

  /* If we are gimplifying at the statement level, we're done.  Tack
     everything together and return.  */
  if (fallback == fb_none || is_statement)
    {
      /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
	 it out for GC to reclaim it.  */
      *expr_p = NULL_TREE;

      if (!gimple_seq_empty_p (internal_pre)
	  || !gimple_seq_empty_p (internal_post))
	{
	  gimplify_seq_add_seq (&internal_pre, internal_post);
	  gimplify_seq_add_seq (pre_p, internal_pre);
	}

      /* The result of gimplifying *EXPR_P is going to be the last few
	 statements in *PRE_P and *POST_P.  Add location information
	 to all the statements that were added by the gimplification
	 helpers.  */
      if (!gimple_seq_empty_p (*pre_p))
	annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);

      if (!gimple_seq_empty_p (*post_p))
	annotate_all_with_location_after (*post_p, post_last_gsi,
					  input_location);

      goto out;
    }

#ifdef ENABLE_GIMPLE_CHECKING
  if (*expr_p)
    {
      enum tree_code code = TREE_CODE (*expr_p);
      /* These expressions should already be in gimple IR form.  */
      gcc_assert (code != MODIFY_EXPR
		  && code != ASM_EXPR
		  && code != BIND_EXPR
		  && code != CATCH_EXPR
		  && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
		  && code != EH_FILTER_EXPR
		  && code != GOTO_EXPR
		  && code != LABEL_EXPR
		  && code != LOOP_EXPR
		  && code != SWITCH_EXPR
		  && code != TRY_FINALLY_EXPR
		  && code != OACC_PARALLEL
		  && code != OACC_KERNELS
		  && code != OACC_DATA
		  && code != OACC_HOST_DATA
		  && code != OACC_DECLARE
		  && code != OACC_UPDATE
		  && code != OACC_ENTER_DATA
		  && code != OACC_EXIT_DATA
		  && code != OACC_CACHE
		  && code != OMP_CRITICAL
		  && code != OMP_FOR
		  && code != OACC_LOOP
		  && code != OMP_MASTER
		  && code != OMP_TASKGROUP
		  && code != OMP_ORDERED
		  && code != OMP_PARALLEL
		  && code != OMP_SECTIONS
		  && code != OMP_SECTION
		  && code != OMP_SINGLE);
    }
#endif

  /* Otherwise we're gimplifying a subexpression, so the resulting
     value is interesting.  If it's a valid operand that matches
     GIMPLE_TEST_F, we're done.  Unless we are handling some
     post-effects internally; if that's the case, we need to copy into
     a temporary before adding the post-effects to POST_P.  */
  if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
    goto out;

  /* Otherwise, we need to create a new temporary for the gimplified
     expression.  */

  /* We can't return an lvalue if we have an internal postqueue.  The
     object the lvalue refers to would (probably) be modified by the
     postqueue; we need to copy the value out first, which means an
     rvalue.  */
  if ((fallback & fb_lvalue)
      && gimple_seq_empty_p (internal_post)
      && is_gimple_addressable (*expr_p))
    {
      /* An lvalue will do.  Take the address of the expression, store it
	 in a temporary, and replace the expression with an INDIRECT_REF of
	 that temporary.  */
      tmp = build_fold_addr_expr_loc (input_location, *expr_p);
      gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
      *expr_p = build_simple_mem_ref (tmp);
    }
  else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
    {
      /* An rvalue will do.  Assign the gimplified expression into a
	 new temporary TMP and replace the original expression with
	 TMP.  First, make sure that the expression has a type so that
	 it can be assigned into a temporary.  */
      gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
      *expr_p = get_formal_tmp_var (*expr_p, pre_p);
    }
  else
    {
#ifdef ENABLE_GIMPLE_CHECKING
      if (!(fallback & fb_mayfail))
	{
	  fprintf (stderr, "gimplification failed:\n");
	  print_generic_expr (stderr, *expr_p, 0);
	  debug_tree (*expr_p);
	  internal_error ("gimplification failed");
	}
#endif
      gcc_assert (fallback & fb_mayfail);

      /* If this is an asm statement, and the user asked for the
	 impossible, don't die.  Fail and let gimplify_asm_expr
	 issue an error.  */
      ret = GS_ERROR;
      goto out;
    }

  /* Make sure the temporary matches our predicate.  */
  gcc_assert ((*gimple_test_f) (*expr_p));

  if (!gimple_seq_empty_p (internal_post))
    {
      annotate_all_with_location (internal_post, input_location);
      gimplify_seq_add_seq (pre_p, internal_post);
    }

 out:
  input_location = saved_location;
  return ret;
}
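
/* A typical use of gimplify_expr from the helpers above is forcing an
   operand into an rvalue register, e.g.

	gimplify_expr (&op, pre_p, post_p, is_gimple_val, fb_rvalue);

   so that gimplifying a call argument such as 'foo (a + b)' leaves
   roughly

	t.1 = a + b;
	foo (t.1);

   with the temporary created on the get_formal_tmp_var path above.  */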

/* Look through TYPE for variable-sized objects and gimplify each such
   size that we find.  Add to LIST_P any statements generated.  */

void
gimplify_type_sizes (tree type, gimple_seq *list_p)
{
  tree field, t;

  if (type == NULL || type == error_mark_node)
    return;

  /* We first do the main variant, then copy into any other variants.  */
  type = TYPE_MAIN_VARIANT (type);

  /* Avoid infinite recursion.  */
  if (TYPE_SIZES_GIMPLIFIED (type))
    return;

  TYPE_SIZES_GIMPLIFIED (type) = 1;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
      gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);

      for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
	{
	  TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
	  TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
	}
      break;

    case ARRAY_TYPE:
      /* These types may not have declarations, so handle them here.  */
      gimplify_type_sizes (TREE_TYPE (type), list_p);
      gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
      /* Ensure VLA bounds aren't removed, for -O0 they should be variables
	 with assigned stack slots, for -O1+ -g they should be tracked
	 by VTA.  */
      if (!(TYPE_NAME (type)
	    && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
	    && DECL_IGNORED_P (TYPE_NAME (type)))
	  && TYPE_DOMAIN (type)
	  && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
	{
	  t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	  if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
	    DECL_IGNORED_P (t) = 0;
	  t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	  if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
	    DECL_IGNORED_P (t) = 0;
	}
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
	    gimplify_one_sizepos (&DECL_SIZE (field), list_p);
	    gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
	    gimplify_type_sizes (TREE_TYPE (field), list_p);
	  }
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
	/* We used to recurse on the pointed-to type here, which turned out to
	   be incorrect because its definition might refer to variables not
	   yet initialized at this point if a forward declaration is involved.

	   It was actually useful for anonymous pointed-to types to ensure
	   that the sizes evaluation dominates every possible later use of the
	   values.  Restricting to such types here would be safe since there
	   is no possible forward declaration around, but would introduce an
	   undesirable middle-end semantic to anonymity.  We then defer to
	   front-ends the responsibility of ensuring that the sizes are
	   evaluated both early and late enough, e.g. by attaching artificial
	   type declarations to the tree.  */
      break;

    default:
      break;
    }

  gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
  gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);

  for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
    {
      TYPE_SIZE (t) = TYPE_SIZE (type);
      TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
      TYPE_SIZES_GIMPLIFIED (t) = 1;
    }
}

/* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
   a size or position, has had all of its SAVE_EXPRs evaluated.
   We add any required statements to *STMT_P.  */

void
gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
{
  tree expr = *expr_p;

  /* We don't do anything if the value isn't there, is constant, or contains
     a PLACEHOLDER_EXPR.  We also don't want to do anything if it's already
     a VAR_DECL.  If it's a VAR_DECL from another function, the gimplifier
     will want to replace it with a new variable, but that will cause problems
     if this type is from outside the function.  It's OK to have that here.  */
  if (is_gimple_sizepos (expr))
    return;

  *expr_p = unshare_expr (expr);

  gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
}
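
/* For example, for a variable-length array

	int a[n];

   the routines above evaluate the domain bounds and the type size once,
   up front, roughly (with hypothetical temporary names)

	n.0 = n;
	D.1234 = (sizetype) n.0 * 4;

   so that every later use of the type, and every other variant of it,
   shares the same gimplified sizes.  */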

/* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
   containing the sequence of corresponding GIMPLE statements.  If DO_PARMS
   is true, also gimplify the parameters.  */

gbind *
gimplify_body (tree fndecl, bool do_parms)
{
  location_t saved_location = input_location;
  gimple_seq parm_stmts, seq;
  gimple outer_stmt;
  gbind *outer_bind;
  struct cgraph_node *cgn;

  timevar_push (TV_TREE_GIMPLIFY);

  /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
     gimplification.  */
  default_rtl_profile ();

  gcc_assert (gimplify_ctxp == NULL);
  push_gimplify_context ();

  if (flag_openacc || flag_openmp)
    {
      gcc_assert (gimplify_omp_ctxp == NULL);
      if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
	gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
    }

  /* Unshare most shared trees in the body and in that of any nested functions.
     It would seem we don't have to do this for nested functions because
     they are supposed to be output and then the outer function gimplified
     first, but the g++ front end doesn't always do it that way.  */
  unshare_body (fndecl);
  unvisit_body (fndecl);

  cgn = cgraph_node::get (fndecl);
  if (cgn && cgn->origin)
    nonlocal_vlas = new hash_set<tree>;

  /* Make sure input_location isn't set to something weird.  */
  input_location = DECL_SOURCE_LOCATION (fndecl);

  /* Resolve callee-copies.  This has to be done before processing
     the body so that DECL_VALUE_EXPR gets processed correctly.  */
  parm_stmts = do_parms ? gimplify_parameters () : NULL;

  /* Gimplify the function's body.  */
  seq = NULL;
  gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
  outer_stmt = gimple_seq_first_stmt (seq);
  if (!outer_stmt)
    {
      outer_stmt = gimple_build_nop ();
      gimplify_seq_add_stmt (&seq, outer_stmt);
    }

  /* The body must contain exactly one statement, a GIMPLE_BIND.  If this is
     not the case, wrap everything in a GIMPLE_BIND to make it so.  */
  if (gimple_code (outer_stmt) == GIMPLE_BIND
      && gimple_seq_first (seq) == gimple_seq_last (seq))
    outer_bind = as_a <gbind *> (outer_stmt);
  else
    outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);

  DECL_SAVED_TREE (fndecl) = NULL_TREE;
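
  /* Illustrative note (added; not original source): a body that was already
     a BIND_EXPR, e.g. "{ int x; x = 1; }", gimplifies to a single
     GIMPLE_BIND and is used as OUTER_BIND directly; a body that gimplifies
     to a bare statement sequence with no enclosing bind is wrapped by the
     branch above instead.  */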

  /* If we had callee-copies statements, insert them at the beginning
     of the function and clear DECL_VALUE_EXPR_P on the parameters.  */
  if (!gimple_seq_empty_p (parm_stmts))
    {
      tree parm;

      gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
      gimple_bind_set_body (outer_bind, parm_stmts);

      for (parm = DECL_ARGUMENTS (current_function_decl);
	   parm; parm = DECL_CHAIN (parm))
	if (DECL_HAS_VALUE_EXPR_P (parm))
	  {
	    DECL_HAS_VALUE_EXPR_P (parm) = 0;
	    DECL_IGNORED_P (parm) = 0;
	  }
    }

  if (nonlocal_vlas)
    {
      if (nonlocal_vla_vars)
	{
	  /* tree-nested.c may later on call declare_vars (..., true);
	     which relies on the BLOCK_VARS chain being the tail of the
	     gimple_bind_vars chain.  Ensure we don't violate that
	     assumption.  */
	  if (gimple_bind_block (outer_bind)
	      == DECL_INITIAL (current_function_decl))
	    declare_vars (nonlocal_vla_vars, outer_bind, true);
	  else
	    BLOCK_VARS (DECL_INITIAL (current_function_decl))
	      = chainon (BLOCK_VARS (DECL_INITIAL (current_function_decl)),
			 nonlocal_vla_vars);
	  nonlocal_vla_vars = NULL_TREE;
	}
      delete nonlocal_vlas;
      nonlocal_vlas = NULL;
    }

  if ((flag_openacc || flag_openmp || flag_openmp_simd)
      && gimplify_omp_ctxp)
    {
      delete_omp_context (gimplify_omp_ctxp);
      gimplify_omp_ctxp = NULL;
    }

  pop_gimplify_context (outer_bind);
  gcc_assert (gimplify_ctxp == NULL);

#ifdef ENABLE_CHECKING
  if (!seen_error ())
    verify_gimple_in_seq (gimple_bind_body (outer_bind));
#endif

  timevar_pop (TV_TREE_GIMPLIFY);
  input_location = saved_location;

  return outer_bind;
}

typedef char *char_p; /* For DEF_VEC_P.  */

/* Return whether we should exclude FNDECL from instrumentation.  */

static bool
flag_instrument_functions_exclude_p (tree fndecl)
{
  vec<char_p> *v;

  v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
  if (v && v->length () > 0)
    {
      const char *name;
      int i;
      char *s;

      name = lang_hooks.decl_printable_name (fndecl, 0);
      FOR_EACH_VEC_ELT (*v, i, s)
	if (strstr (name, s) != NULL)
	  return true;
    }

  v = (vec<char_p> *) flag_instrument_functions_exclude_files;
  if (v && v->length () > 0)
    {
      const char *name;
      int i;
      char *s;

      name = DECL_SOURCE_FILE (fndecl);
      FOR_EACH_VEC_ELT (*v, i, s)
	if (strstr (name, s) != NULL)
	  return true;
    }

  return false;
}

/* Entry point to the gimplification pass.  FNDECL is the FUNCTION_DECL
   node for the function we want to gimplify.

   The resulting sequence of GIMPLE statements replaces the GENERIC body:
   it is stored as the GIMPLE body of FNDECL, and DECL_SAVED_TREE is
   cleared.  */

void
gimplify_function_tree (tree fndecl)
{
  tree parm, ret;
  gimple_seq seq;
  gbind *bind;

  gcc_assert (!gimple_body (fndecl));

  if (DECL_STRUCT_FUNCTION (fndecl))
    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
  else
    push_struct_function (fndecl);

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    {
      /* Preliminarily mark non-addressed complex variables as eligible
	 for promotion to gimple registers.  We'll transform their uses
	 as we find them.  */
      if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
	  && !TREE_THIS_VOLATILE (parm)
	  && !needs_to_live_in_memory (parm))
	DECL_GIMPLE_REG_P (parm) = 1;
    }

  ret = DECL_RESULT (fndecl);
  if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
      && !needs_to_live_in_memory (ret))
    DECL_GIMPLE_REG_P (ret) = 1;

  bind = gimplify_body (fndecl, true);

  /* The tree body of the function is no longer needed; replace it
     with the new GIMPLE body.  */
  seq = NULL;
  gimple_seq_add_stmt (&seq, bind);
  gimple_set_body (fndecl, seq);

  /* If we're instrumenting function entry/exit, then prepend the call to
     the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
     catch the exit hook.  */
  /* ??? Add some way to ignore exceptions for this TFE.  */
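  /* Illustrative sketch (added; not original source) of the wrapping built
     below for a function F whose gimplified body is BIND:

	 new_bind {
	   return_addr = __builtin_return_address (0);
	   __cyg_profile_func_enter (&F, return_addr);
	   try {
	     BIND
	   } finally {
	     return_addr = __builtin_return_address (0);
	     __cyg_profile_func_exit (&F, return_addr);
	   }
	 }

     BUILT_IN_PROFILE_FUNC_{ENTER,EXIT} expand to the __cyg_profile_* hooks.  */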
  if (flag_instrument_function_entry_exit
      && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
      /* Do not instrument extern inline functions.  */
      && !(DECL_DECLARED_INLINE_P (fndecl)
	   && DECL_EXTERNAL (fndecl)
	   && DECL_DISREGARD_INLINE_LIMITS (fndecl))
      && !flag_instrument_functions_exclude_p (fndecl))
    {
      tree x;
      gbind *new_bind;
      gimple tf;
      gimple_seq cleanup = NULL, body = NULL;
      tree tmp_var;
      gcall *call;

      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
      call = gimple_build_call (x, 2,
				build_fold_addr_expr (current_function_decl),
				tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);

      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&body, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
      call = gimple_build_call (x, 2,
				build_fold_addr_expr (current_function_decl),
				tmp_var);
      gimplify_seq_add_stmt (&body, call);
      gimplify_seq_add_stmt (&body, tf);
      new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
      /* Clear the block for BIND, since it is no longer directly inside
	 the function, but within a try block.  */
      gimple_bind_set_block (bind, NULL);

      /* Replace the current function body with the body
	 wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
      bind = new_bind;
    }

  if ((flag_sanitize & SANITIZE_THREAD) != 0
      && !lookup_attribute ("no_sanitize_thread", DECL_ATTRIBUTES (fndecl)))
    {
      gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
      gimple tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
      gbind *new_bind = gimple_build_bind (NULL, tf, gimple_bind_block (bind));
      /* Clear the block for BIND, since it is no longer directly inside
	 the function, but within a try block.  */
      gimple_bind_set_block (bind, NULL);
      /* Replace the current function body with the body
	 wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
    }

  DECL_SAVED_TREE (fndecl) = NULL_TREE;
  cfun->curr_properties = PROP_gimple_any;

  pop_cfun ();
}
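
/* Illustrative example (added; not original source): for

       int f (int a, int b) { return (a + b) * 2; }

   gimplification produces a body roughly like

       f (int a, int b)
       {
	 int t1, t2;
	 t1 = a + b;
	 t2 = t1 * 2;
	 return t2;
       }

   i.e. a single GIMPLE_BIND whose body is three-address statements (real
   dumps name the temporaries D.NNNN or _N).  */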

/* Return a dummy expression of type TYPE in order to keep going after an
   error.  */

static tree
dummy_object (tree type)
{
  tree t = build_int_cst (build_pointer_type (type), 0);
  return build2 (MEM_REF, type, t, t);
}

/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);

  if (have_va_type == NULL_TREE)
    {
      error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
      return GS_ERROR;
    }

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type)) != type)
    {
      static bool gave_help;
      bool warned;

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      warned = warning_at (loc, 0,
			   "%qT is promoted to %qT when passed through %<...%>",
			   type, promoted_type);
      if (!gave_help && warned)
	{
	  gave_help = true;
	  inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      if (warned)
	inform (loc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
	 expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
			       builtin_decl_implicit (BUILT_IN_TRAP), 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }
  else
    {
      /* Make it easier for the backends by protecting the valist argument
	 from multiple evaluations.  */
      if (TREE_CODE (have_va_type) == ARRAY_TYPE)
	{
	  /* For this case, the backends will be expecting a pointer to
	     TREE_TYPE (abi), but it's possible we've actually been given an
	     array (an actual TARGET_FN_ABI_VA_LIST).  So fix it.  */
	  if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	    {
	      tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
	      valist = fold_convert_loc (loc, p1,
					 build_fold_addr_expr_loc (loc, valist));
	    }

	  gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
	}
      else
	gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);

      if (!targetm.gimplify_va_arg_expr)
	/* FIXME: Once most targets are converted we should merely
	   assert this is non-null.  */
	return GS_ALL_DONE;

      *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
      return GS_OK;
    }
}
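
/* Illustrative example (added; not original source): the promotion
   diagnostic above fires for code such as

       char c = va_arg (ap, char);

   because a char argument is promoted to int when passed through "...",
   so the conforming form is

       char c = (char) va_arg (ap, int);  */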

/* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.

   DST/SRC are the destination and source respectively.  You can pass
   ungimplified trees in DST or SRC, in which case they will be
   converted to a gimple operand if necessary.

   This function returns the newly created GIMPLE_ASSIGN tuple.  */

gimple
gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
{
  tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
  gimplify_and_add (t, seq_p);
  ggc_free (t);
  return gimple_seq_last_stmt (*seq_p);
}

inline hashval_t
gimplify_hasher::hash (const value_type *p)
{
  tree t = p->val;
  return iterative_hash_expr (t, 0);
}

inline bool
gimplify_hasher::equal (const value_type *p1, const compare_type *p2)
{
  tree t1 = p1->val;
  tree t2 = p2->val;
  enum tree_code code = TREE_CODE (t1);

  if (TREE_CODE (t2) != code
      || TREE_TYPE (t1) != TREE_TYPE (t2))
    return false;

  if (!operand_equal_p (t1, t2, 0))
    return false;

#ifdef ENABLE_CHECKING
  /* Only allow them to compare equal if they also hash equal; otherwise
     results are nondeterministic, and we fail bootstrap comparison.  */
  gcc_assert (hash (p1) == hash (p2));
#endif

  return true;
}
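
/* Illustrative usage sketch for gimplify_assign above (added; not original
   source; LHS and RHS are assumed to be previously built trees of
   compatible type):

       gimple_seq seq = NULL;
       gimple stmt = gimplify_assign (lhs, rhs, &seq);

   Because RHS may need gimplification, SEQ can end up holding several
   statements; the returned STMT is always the final assignment.  */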