/* Lowering pass for OMP directives.  Converts OMP directives into explicit
   calls to the runtime library (libgomp), data marshalling to implement data
   sharing and copying clauses, offloading to accelerators, and more.

   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005-2020 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it under
   the terms of the GNU General Public License as published by the Free
   Software Foundation; either version 3, or (at your option) any later
   version.

   GCC is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "splay-tree.h"
#include "omp-general.h"
#include "omp-low.h"
#include "omp-grid.h"
#include "gimple-low.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "tree-nested.h"
#include "context.h"
#include "gomp-constants.h"
#include "gimple-pretty-print.h"
#include "hsa-common.h"
#include "stringpool.h"
#include "attribs.h"

/* Lowering of OMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for regions which are then moved to a new
   function, to be invoked by the thread library, or offloaded.  */
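/* As a rough sketch (not the literal output of this pass alone; expansion
   is finished by pass_expand_omp), a construct such as

	#pragma omp parallel shared (a)
	  { ...body... }

   ends up with the shared state marshalled through a record and the body
   in an artificial child function, along the lines of

	struct .omp_data_s { int *a; };
	struct .omp_data_s .omp_data_o;
	.omp_data_o.a = &a;
	__builtin_GOMP_parallel (foo._omp_fn.0, &.omp_data_o, 0, 0);

   with foo._omp_fn.0 receiving the record through its .omp_data_i
   parameter.  The exact shape depends on the clauses involved.  */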
/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn;
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,lation_point} and explicit and implicit
     barriers should jump during the omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* For task reductions registered in this context, a vector containing
     the length of the private copies block (if constant, otherwise NULL)
     and then offsets (if constant, otherwise NULL) for each entry.  */
  vec<tree> task_reductions;

  /* A hash map from the reduction clauses to the registered array
     elts.  */
  hash_map<tree, unsigned> *task_reduction_map;

  /* And a hash map from the lastprivate(conditional:) variables to their
     corresponding tracking loop iteration variables.  */
  hash_map<tree, tree> *lastprivate_conditional_map;

  /* A tree_list of the reduction clauses in this context.  This is
     only used for checking the consistency of OpenACC reduction
     clauses in scan_omp_for and is not guaranteed to contain a valid
     value outside of this function.  */
  tree local_reduction_clauses;

  /* A tree_list of the reduction clauses in outer contexts.  This is
     only used for checking the consistency of OpenACC reduction
     clauses in scan_omp_for and is not guaranteed to contain a valid
     value outside of this function.  */
  tree outer_reduction_clauses;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;

  /* True if lower_omp_1 should look up lastprivate conditional in parent
     context.  */
  bool combined_into_simd_safelen1;

  /* True if there is nested scan context with inclusive clause.  */
  bool scan_inclusive;

  /* True if there is nested scan context with exclusive clause.  */
  bool scan_exclusive;

  /* True in the second simd loop of for simd with inscan reductions.  */
  bool for_simd_scan_phase;

  /* True if there is order(concurrent) clause on the construct.  */
  bool order_concurrent;

  /* True if there is bind clause on the construct (i.e. a loop construct).  */
  bool loop_p;
};

static splay_tree all_contexts;
static int taskreg_nesting_level;
static int target_nesting_level;
static bitmap task_shared_vars;
static bitmap global_nonaddressable_vars;
static vec<omp_context *> taskreg_contexts;
static vec<gomp_task *> task_cpyfns;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);

#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;
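/* WALK_SUBSTMTS is meant to be expanded inside the switch of a
   walk_gimple_seq callback (see e.g. omp_find_combined_for below), so
   that the walker descends into container statements.  */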
/* Return true if CTX corresponds to an OpenACC 'parallel' or 'serial'
   region.  */

static bool
is_oacc_parallel_or_serial (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && ((gimple_omp_target_kind (ctx->stmt)
	       == GF_OMP_TARGET_KIND_OACC_PARALLEL)
	      || (gimple_omp_target_kind (ctx->stmt)
		  == GF_OMP_TARGET_KIND_OACC_SERIAL)));
}

/* Return true if CTX corresponds to an OpenACC 'kernels' region.  */

static bool
is_oacc_kernels (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_KERNELS));
}

/* If DECL is the artificial dummy VAR_DECL created for non-static
   data member privatization, return the underlying "this" parameter,
   otherwise return NULL.  */

tree
omp_member_access_dummy_var (tree decl)
{
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
      case MEM_REF:
      case INDIRECT_REF:
      CASE_CONVERT:
      case POINTER_PLUS_EXPR:
	v = TREE_OPERAND (v, 0);
	continue;
      case PARM_DECL:
	if (DECL_CONTEXT (v) == current_function_decl
	    && DECL_ARTIFICIAL (v)
	    && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
	  return v;
	return NULL_TREE;
      default:
	return NULL_TREE;
      }
}
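/* For illustration (C++ only): privatizing a non-static data member, as
   in

	struct S { int x; void f (); };
	void S::f () { #pragma omp parallel private (x) ... }

   makes the front end emit an artificial VAR_DECL for X whose
   DECL_VALUE_EXPR is this->x; the function above recovers the "this"
   parameter from such a decl.  */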
/* Helper for unshare_and_remap, called through walk_tree.  */

static tree
unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
{
  tree *pair = (tree *) data;
  if (*tp == pair[0])
    {
      *tp = unshare_expr (pair[1]);
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Return unshare_expr (X) with all occurrences of FROM
   replaced with TO.  */

static tree
unshare_and_remap (tree x, tree from, tree to)
{
  tree pair[2] = { from, to };
  x = unshare_expr (x);
  walk_tree (&x, unshare_and_remap_1, pair, NULL);
  return x;
}

/* Convenience function for calling scan_omp_1_op on tree operands.  */

static inline tree
scan_omp_op (tree *tp, omp_context *ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  return walk_tree (tp, scan_omp_1_op, &wi, NULL);
}

static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);

/* Return true if CTX is for an omp parallel.  */

static inline bool
is_parallel_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
}


/* Return true if CTX is for an omp task.  */

static inline bool
is_task_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}


/* Return true if CTX is for an omp taskloop.  */

static inline bool
is_taskloop_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
}


/* Return true if CTX is for a host omp teams.  */

static inline bool
is_host_teams_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	 && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
}

/* Return true if CTX is for an omp parallel or omp task or host omp teams
   (the last one is strictly not a task region in OpenMP speak, but we
   need to treat it similarly).  */

static inline bool
is_taskreg_ctx (omp_context *ctx)
{
  return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
}

/* Return true if EXPR is variable sized.  */

static inline bool
is_variable_sized (const_tree expr)
{
  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
}

/* Lookup variables.  The "maybe" form allows the variable not to have
   been entered; otherwise we assert that it has been.  */

static inline tree
lookup_decl (tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (var);
  return *n;
}

static inline tree
maybe_lookup_decl (const_tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
  return n ? *n : NULL_TREE;
}

static inline tree
lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
lookup_sfield (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->sfield_map
			 ? ctx->sfield_map : ctx->field_map, key);
  return (tree) n->value;
}

static inline tree
lookup_sfield (tree var, omp_context *ctx)
{
  return lookup_sfield ((splay_tree_key) var, ctx);
}

static inline tree
maybe_lookup_field (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, key);
  return n ? (tree) n->value : NULL_TREE;
}

static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
  return maybe_lookup_field ((splay_tree_key) var, ctx);
}

/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  */
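/* In rough terms: answering false means the variable can use copy-in
   (and possibly copy-out) semantics, i.e. its value is copied into a
   record field by value; answering true means the field holds the
   variable's address instead.  E.g. a shared local int whose address is
   never taken can be copied in and back out, while an addressable or
   aggregate variable must be passed by pointer so that all threads see
   a single object.  */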
static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (is_global_var (decl))
	{
	  /* For file scope vars, track whether we've seen them as
	     non-addressable initially and in that case, keep the same
	     answer for the duration of the pass, even when they are made
	     addressable later on e.g. through reduction expansion.  Global
	     variables which weren't addressable before the pass will not
	     have their privatized copies address taken.  See PR91216.  */
	  if (!TREE_ADDRESSABLE (decl))
	    {
	      if (!global_nonaddressable_vars)
		global_nonaddressable_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (global_nonaddressable_vars, DECL_UID (decl));
	    }
	  else if (!global_nonaddressable_vars
		   || !bitmap_bit_p (global_nonaddressable_vars,
				     DECL_UID (decl)))
	    return true;
	}
      else if (TREE_ADDRESSABLE (decl))
	return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
	 for these.  */
      if (TREE_READONLY (decl)
	  || ((TREE_CODE (decl) == RESULT_DECL
	       || TREE_CODE (decl) == PARM_DECL)
	      && DECL_BY_REFERENCE (decl)))
	return false;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (shared_ctx->is_nested)
	{
	  omp_context *up;

	  for (up = shared_ctx->outer; up; up = up->outer)
	    if ((is_taskreg_ctx (up)
		 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
		     && is_gimple_omp_offloaded (up->stmt)))
		&& maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      if (gimple_code (up->stmt) == GIMPLE_OMP_TARGET)
		{
		  for (c = gimple_omp_target_clauses (up->stmt);
		       c; c = OMP_CLAUSE_CHAIN (c))
		    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
			&& OMP_CLAUSE_DECL (c) == decl)
		      break;
		}
	      else
		for (c = gimple_omp_taskreg_clauses (up->stmt);
		     c; c = OMP_CLAUSE_CHAIN (c))
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		      && OMP_CLAUSE_DECL (c) == decl)
		    break;

	      if (c)
		goto maybe_mark_addressable_and_ret;
	    }
	}

      /* For tasks avoid using copy-in/out.  As tasks can be
	 deferred or executed in a different thread, when GOMP_task
	 returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
	{
	  tree outer;
	maybe_mark_addressable_and_ret:
	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!task_shared_vars)
		task_shared_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (task_shared_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}

/* Construct a new automatic decl similar to VAR.  */
static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;
  DECL_CHAIN (copy) = ctx->block_vars;
  /* If VAR is listed in task_shared_vars, it means it wasn't
     originally addressable and is so only because a task needs to take
     its address.  But we don't need to take the address of
     privatizations from that var.  */
  if (TREE_ADDRESSABLE (var)
      && ((task_shared_vars
	   && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
	  || (global_nonaddressable_vars
	      && bitmap_bit_p (global_nonaddressable_vars, DECL_UID (var)))))
    TREE_ADDRESSABLE (copy) = 0;
  ctx->block_vars = copy;

  return copy;
}

static tree
omp_copy_decl_1 (tree var, omp_context *ctx)
{
  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
}

/* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
   as appropriate.  */

static tree
omp_build_component_ref (tree obj, tree field)
{
  tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
  if (TREE_THIS_VOLATILE (field))
    TREE_THIS_VOLATILE (ret) |= 1;
  if (TREE_READONLY (field))
    TREE_READONLY (ret) |= 1;
  return ret;
}

/* Build tree nodes to access the field for VAR on the receiver side.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_simple_mem_ref (ctx->receiver_decl);
  TREE_THIS_NOTRAP (x) = 1;
  x = omp_build_component_ref (x, field);
  if (by_ref)
    {
      x = build_simple_mem_ref (x);
      TREE_THIS_NOTRAP (x) = 1;
    }

  return x;
}
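/* An illustration of the sender/receiver pairing (the precise trees
   depend on the clauses): for a shared variable I passed by value, the
   parent stores ".omp_data_o.i = i" through a ref built by
   build_sender_ref below, and the child reads it back as
   ".omp_data_i->i" via the reference built here; with BY_REF the field
   holds &i and an extra dereference is added.  */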
/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
		     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;
  omp_context *outer = ctx->outer;
  while (outer && gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
    outer = outer->outer;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	    && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
	   || ctx->loop_p
	   || (code == OMP_CLAUSE_PRIVATE
	       && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
	 even private vars in its linear etc. clauses.
	 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
	 to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (outer && is_taskreg_ctx (outer))
	x = lookup_decl (var, outer);
      else if (outer)
	x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
	x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (outer);
      splay_tree_node n
	= splay_tree_lookup (outer->field_map,
			     (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
	{
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
	    x = var;
	  else
	    x = lookup_decl (var, outer);
	}
      else
	{
	  tree field = (tree) n->value;
	  /* If the receiver record type was remapped in the child function,
	     remap the field into the new record type.  */
	  x = maybe_lookup_field (field, outer);
	  if (x != NULL)
	    field = x;

	  x = build_simple_mem_ref (outer->receiver_decl);
	  x = omp_build_component_ref (x, field);
	  if (use_pointer_for_field (var, outer))
	    x = build_simple_mem_ref (x);
	}
    }
  else if (outer)
    {
      if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
	{
	  outer = outer->outer;
	  gcc_assert (outer
		      && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
	}
      x = lookup_decl (var, outer);
    }
  else if (omp_is_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      tree t = omp_member_access_dummy_var (var);
      if (t)
	{
	  x = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    x = unshare_and_remap (x, t, o);
	  else
	    x = unshare_expr (x);
	}
    }

  if (omp_is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}

/* Build tree nodes to access the field for VAR on the sender side.  */

static tree
build_sender_ref (splay_tree_key key, omp_context *ctx)
{
  tree field = lookup_sfield (key, ctx);
  return omp_build_component_ref (ctx->sender_decl, field);
}

static tree
build_sender_ref (tree var, omp_context *ctx)
{
  return build_sender_ref ((splay_tree_key) var, ctx);
}

/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  */
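/* A note on MASK, inferred from the uses in this file: bit 0 (1)
   installs the field in FIELD_MAP/RECORD_TYPE and bit 1 (2) in
   SFIELD_MAP/SRECORD_TYPE, so 3 means both; bit 2 (4) requests a
   pointer-to-pointer field for array types; bit 3 (8) keys the maps off
   &DECL_UID (VAR) instead of VAR; bit 4 (16) is used for Fortran array
   descriptors and keys off &DECL_NAME (VAR).  */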
static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 16) != 0)
    {
      key = (splay_tree_key) &DECL_NAME (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
	      || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  if ((mask & 16) != 0)
    type = lang_hooks.decls.omp_array_data (var, true);

  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself; restrict qualifiers
     in the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    type = build_pointer_type (type);
  else if ((mask & 3) == 1 && omp_is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if ((mask & 16) == 0 && type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (t),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}
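/* Create a copy of VAR local to CTX's child function and record the
   VAR -> copy mapping in CTX's decl map.  */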
static tree
install_var_local (tree var, omp_context *ctx)
{
  tree new_var = omp_copy_decl_1 (var, ctx);
  insert_decl_map (&ctx->cb, var, new_var);
  return new_var;
}

/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}

/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
	return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}

/* Create a new context, with OUTER_CTX being the surrounding context.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
		     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->cb.adjust_array_error_bounds = true;
      ctx->cb.dont_remap_vla_if_no_change = true;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}

static gimple_seq maybe_catch_exception (gimple_seq);

/* Finalize task copyfn.  */
static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}

/* Destroy an omp_context data structure.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  delete ctx->cb.decl_map;

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  if (ctx->task_reduction_map)
    {
      ctx->task_reductions.release ();
      delete ctx->task_reduction_map;
    }

  delete ctx->lastprivate_conditional_map;

  XDELETE (ctx);
}

/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */
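/* By way of illustration (not taken from a testcase): if a shared VLA
   "int a[n]" produced a field of variably modified type, the bounds of
   that type still refer to decls of the parent function; the rebuild
   below creates a fresh record whose field types have been remapped into
   the child, so the sizes are computed from the child's copies.  */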
static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
			 TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
	{
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
	  DECL_CHAIN (new_f) = new_fields;
	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}

/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  */

static void
scan_sharing_clauses (tree clauses, omp_context *ctx)
{
  tree c, decl;
  bool scan_array_reductions = false;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    goto do_private;
	  else if (!is_variable_sized (decl))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_SHARED:
	  decl = OMP_CLAUSE_DECL (c);
	  /* Ignore shared directives in teams construct inside of
	     target construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	      && !is_host_teams_ctx (ctx))
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
	      if (is_global_var (odecl))
		break;
	      insert_decl_map (&ctx->cb, decl, odecl);
	      break;
	    }
	  gcc_assert (is_taskreg_ctx (ctx));
	  gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
		      || !is_variable_sized (decl));
	  /* Global variables don't need to be copied,
	     the receiver side will use them directly.  */
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      use_pointer_for_field (decl, ctx);
	      break;
	    }
	  by_ref = use_pointer_for_field (decl, NULL);
	  if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
	      || TREE_ADDRESSABLE (decl)
	      || by_ref
	      || omp_is_reference (decl))
	    {
	      by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 3, ctx);
	      install_var_local (decl, ctx);
	      break;
	    }
	  /* We don't need to copy const scalar vars back.  */
	  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
	  goto do_private;

	case OMP_CLAUSE_REDUCTION:
	  /* Collect 'reduction' clauses on OpenACC compute construct.  */
	  if (is_gimple_omp_oacc (ctx->stmt)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      /* No 'reduction' clauses on OpenACC 'kernels'.  */
	      gcc_checking_assert (!is_oacc_kernels (ctx));

	      ctx->local_reduction_clauses
		= tree_cons (NULL, c, ctx->local_reduction_clauses);
	    }
	  /* FALLTHRU */

	case OMP_CLAUSE_IN_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
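	  /* A MEM_REF decl here corresponds to an array-section style
	     reduction, e.g. reduction (+: a[0:n]); in that case only the
	     base pointer needs the data-sharing treatment below (an
	     informal summary of the representation).  */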
	  if (TREE_CODE (decl) == MEM_REF)
	    {
	      tree t = TREE_OPERAND (decl, 0);
	      if (TREE_CODE (t) == POINTER_PLUS_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (TREE_CODE (t) == INDIRECT_REF
		  || TREE_CODE (t) == ADDR_EXPR)
		t = TREE_OPERAND (t, 0);
	      install_var_local (t, ctx);
	      if (is_taskreg_ctx (ctx)
		  && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
		      || (is_task_ctx (ctx)
			  && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
			      || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
				  && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
				      == POINTER_TYPE)))))
		  && !is_variable_sized (t)
		  && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
		      || (!OMP_CLAUSE_REDUCTION_TASK (c)
			  && !is_task_ctx (ctx))))
		{
		  by_ref = use_pointer_for_field (t, NULL);
		  if (is_task_ctx (ctx)
		      && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
		      && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
		    {
		      install_var_field (t, false, 1, ctx);
		      install_var_field (t, by_ref, 2, ctx);
		    }
		  else
		    install_var_field (t, by_ref, 3, ctx);
		}
	      break;
	    }
	  if (is_task_ctx (ctx)
	      || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		  && OMP_CLAUSE_REDUCTION_TASK (c)
		  && is_parallel_ctx (ctx)))
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
		{
		  by_ref = use_pointer_for_field (decl, ctx);
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
		    install_var_field (decl, by_ref, 3, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && OMP_CLAUSE_REDUCTION_TASK (c))
	    {
	      install_var_local (decl, ctx);
	      break;
	    }
	  goto do_private;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	do_private:
	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
		install_var_field (decl, !omp_is_reference (decl), 3, ctx);
	      else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		install_var_field (decl, true, 3, ctx);
	      else
		install_var_field (decl, false, 3, ctx);
	    }
	  if (is_variable_sized (decl))
	    {
	      if (is_task_ctx (ctx))
		install_var_field (decl, false, 1, ctx);
	      break;
	    }
	  else if (is_taskreg_ctx (ctx))
	    {
	      bool global
		= is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
	      by_ref = use_pointer_for_field (decl, NULL);

	      if (is_task_ctx (ctx)
		  && (global || by_ref || omp_is_reference (decl)))
		{
		  install_var_field (decl, false, 1, ctx);
		  if (!global)
		    install_var_field (decl, by_ref, 2, ctx);
		}
	      else if (!global)
		install_var_field (decl, by_ref, 3, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	  decl = OMP_CLAUSE_DECL (c);

	  /* Fortran array descriptors.  */
	  if (lang_hooks.decls.omp_array_data (decl, true))
	    install_var_field (decl, false, 19, ctx);
	  else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
		    && !omp_is_reference (decl)
		    && !omp_is_allocatable_or_ptr (decl))
		   || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_field (decl, true, 11, ctx);
	  else
	    install_var_field (decl, false, 11, ctx);
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      install_var_local (decl2, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  goto do_private;

	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	  gcc_assert (is_taskreg_ctx (ctx));
	  decl = OMP_CLAUSE_DECL (c);
	  install_var_field (decl, false, 3, ctx);
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	  decl = OMP_CLAUSE_DECL (c);
	  by_ref = use_pointer_for_field (decl, NULL);
	  install_var_field (decl, by_ref, 3, ctx);
	  break;

	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
	  break;
declare target" attribute 1357 don't need to be copied, the receiver side will use them 1358 directly. However, global variables with "omp declare target link" 1359 attribute need to be copied. Or when ALWAYS modifier is used. */ 1360 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP 1361 && DECL_P (decl) 1362 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER 1363 && (OMP_CLAUSE_MAP_KIND (c) 1364 != GOMP_MAP_FIRSTPRIVATE_REFERENCE)) 1365 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE) 1366 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO 1367 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM 1368 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM 1369 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)) 1370 && varpool_node::get_create (decl)->offloadable 1371 && !lookup_attribute ("omp declare target link", 1372 DECL_ATTRIBUTES (decl))) 1373 break; 1374 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP 1375 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER) 1376 { 1377 /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are 1378 not offloaded; there is nothing to map for those. */ 1379 if (!is_gimple_omp_offloaded (ctx->stmt) 1380 && !POINTER_TYPE_P (TREE_TYPE (decl)) 1381 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)) 1382 break; 1383 } 1384 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP 1385 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER 1386 || (OMP_CLAUSE_MAP_KIND (c) 1387 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))) 1388 { 1389 if (TREE_CODE (decl) == COMPONENT_REF 1390 || (TREE_CODE (decl) == INDIRECT_REF 1391 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF 1392 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0))) 1393 == REFERENCE_TYPE))) 1394 break; 1395 if (DECL_SIZE (decl) 1396 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST) 1397 { 1398 tree decl2 = DECL_VALUE_EXPR (decl); 1399 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF); 1400 decl2 = TREE_OPERAND (decl2, 0); 1401 gcc_assert (DECL_P (decl2)); 1402 install_var_local (decl2, ctx); 1403 } 1404 install_var_local (decl, ctx); 1405 break; 1406 } 1407 if (DECL_P (decl)) 1408 { 1409 if (DECL_SIZE (decl) 1410 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST) 1411 { 1412 tree decl2 = DECL_VALUE_EXPR (decl); 1413 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF); 1414 decl2 = TREE_OPERAND (decl2, 0); 1415 gcc_assert (DECL_P (decl2)); 1416 install_var_field (decl2, true, 3, ctx); 1417 install_var_local (decl2, ctx); 1418 install_var_local (decl, ctx); 1419 } 1420 else 1421 { 1422 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP 1423 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER 1424 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) 1425 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE) 1426 install_var_field (decl, true, 7, ctx); 1427 else 1428 install_var_field (decl, true, 3, ctx); 1429 if (is_gimple_omp_offloaded (ctx->stmt) 1430 && !OMP_CLAUSE_MAP_IN_REDUCTION (c)) 1431 install_var_local (decl, ctx); 1432 } 1433 } 1434 else 1435 { 1436 tree base = get_base_address (decl); 1437 tree nc = OMP_CLAUSE_CHAIN (c); 1438 if (DECL_P (base) 1439 && nc != NULL_TREE 1440 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP 1441 && OMP_CLAUSE_DECL (nc) == base 1442 && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER 1443 && integer_zerop (OMP_CLAUSE_SIZE (nc))) 1444 { 1445 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1; 1446 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1; 1447 } 1448 else 1449 { 1450 if (ctx->outer) 1451 { 1452 scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer); 1453 decl = OMP_CLAUSE_DECL (c); 1454 } 1455 gcc_assert (!splay_tree_lookup 
		  gcc_assert (!splay_tree_lookup (ctx->field_map,
						  (splay_tree_key) decl));
		  tree field
		    = build_decl (OMP_CLAUSE_LOCATION (c),
				  FIELD_DECL, NULL_TREE, ptr_type_node);
		  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
		  insert_field_into_struct (ctx->record_type, field);
		  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
				     (splay_tree_value) field);
		}
	    }
	  break;

	case OMP_CLAUSE__GRIDDIM_:
	  if (ctx->outer)
	    {
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
	    }
	  break;

	case OMP_CLAUSE_ORDER:
	  ctx->order_concurrent = true;
	  break;

	case OMP_CLAUSE_BIND:
	  ctx->loop_p = true;
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_NONTEMPORAL:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE_TASK_REDUCTION:
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (decl)
	      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CONDTEMP_:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_parallel_ctx (ctx))
	    {
	      install_var_field (decl, false, 3, ctx);
	      install_var_local (decl, ctx);
	    }
	  else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
		   && !OMP_CLAUSE__CONDTEMP__ITER (c))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }
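  /* Second scan of the clauses: by this point every field and local copy
     has been installed, so types and DECL_VALUE_EXPRs can safely be
     fixed up and nested reduction/lastprivate sequences scanned.  */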
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_LINEAR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_variable_sized (decl))
	    {
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
		  && is_gimple_omp_offloaded (ctx->stmt))
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		  fixup_remapped_decl (decl2, ctx, false);
		}
	      install_var_local (decl, ctx);
	    }
	  fixup_remapped_decl (decl, ctx,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			       && OMP_CLAUSE_PRIVATE_DEBUG (c));
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) != MEM_REF)
	    {
	      if (is_variable_sized (decl))
		install_var_local (decl, ctx);
	      fixup_remapped_decl (decl, ctx, false);
	    }
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_TASK_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_SHARED:
	  /* Ignore shared directives in teams construct inside of
	     target construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	      && !is_host_teams_ctx (ctx))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
								 ctx->outer)))
		break;
	      bool by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 11, ctx);
	      break;
	    }
	  fixup_remapped_decl (decl, ctx, false);
	  break;

	case OMP_CLAUSE_MAP:
	  if (!is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable)
	    break;
	  if (DECL_P (decl))
	    {
	      if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
		  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
		{
		  tree new_decl = lookup_decl (decl, ctx);
		  TREE_TYPE (new_decl)
		    = remap_type (TREE_TYPE (decl), &ctx->cb);
		}
	      else if (DECL_SIZE (decl)
		       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  fixup_remapped_decl (decl2, ctx, false);
		  fixup_remapped_decl (decl, ctx, true);
		}
	      else
		fixup_remapped_decl (decl, ctx, false);
	    }
	  break;
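	/* The remaining clauses need no instantiation or fixup in this
	   second scan.  */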
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_ALIGNED:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ORDER:
	case OMP_CLAUSE_BIND:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	case OMP_CLAUSE_NONTEMPORAL:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__GRIDDIM_:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE__CONDTEMP_:
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

  gcc_checking_assert (!scan_array_reductions
		       || !is_gimple_omp_oacc (ctx->stmt));
  if (scan_array_reductions)
    {
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	     || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
	     || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
	    && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	  {
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
    }
}

/* Create a new name for omp child function.  Returns an identifier.  */

static tree
create_omp_child_function_name (bool task_copy)
{
  return clone_function_name_numbered (current_function_decl,
				       task_copy ? "_omp_cpyfn" : "_omp_fn");
}
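/* For reference: for a function "foo", successive child functions are
   named along the lines of foo._omp_fn.0, foo._omp_fn.1, ... and task
   copy functions foo._omp_cpyfn.N (illustrative; the exact numbering
   comes from clone_function_name_numbered).  */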
/* Return true if CTX may belong to offloaded code: either if current function
   is offloaded, or any enclosing context corresponds to a target region.  */

static bool
omp_maybe_offloaded_ctx (omp_context *ctx)
{
  if (cgraph_node::get (current_function_decl)->offloadable)
    return true;
  for (; ctx; ctx = ctx->outer)
    if (is_gimple_omp_offloaded (ctx->stmt))
      return true;
  return false;
}

/* Build a decl for the omp child function.  It'll not contain a body
   yet, just the bare decl.  */

static void
create_omp_child_function (omp_context *ctx, bool task_copy)
{
  tree decl, type, name, t;

  name = create_omp_child_function_name (task_copy);
  if (task_copy)
    type = build_function_type_list (void_type_node, ptr_type_node,
				     ptr_type_node, NULL_TREE);
  else
    type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);

  decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);

  gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
		       || !task_copy);
  if (!task_copy)
    ctx->cb.dst_fn = decl;
  else
    gimple_omp_task_set_copy_fn (ctx->stmt, decl);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
  /* Remove omp declare simd attribute from the new attributes.  */
  if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
    {
      while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
	a = a2;
      a = TREE_CHAIN (a);
      for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
	if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
	  *p = TREE_CHAIN (*p);
	else
	  {
	    tree chain = TREE_CHAIN (*p);
	    *p = copy_node (*p);
	    p = &TREE_CHAIN (*p);
	    *p = chain;
	  }
    }
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
    = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
  DECL_FUNCTION_SPECIFIC_TARGET (decl)
    = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
  DECL_FUNCTION_VERSIONED (decl)
    = DECL_FUNCTION_VERSIONED (current_function_decl);

  if (omp_maybe_offloaded_ctx (ctx))
    {
      cgraph_node::get_create (decl)->offloadable = 1;
      if (ENABLE_OFFLOADING)
	g->have_offload = true;
    }

  if (cgraph_node::get_create (decl)->offloadable
      && !lookup_attribute ("omp declare target",
			    DECL_ATTRIBUTES (current_function_decl)))
    {
      const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
"omp target entrypoint" 1836 : "omp declare target"); 1837 DECL_ATTRIBUTES (decl) 1838 = tree_cons (get_identifier (target_attr), 1839 NULL_TREE, DECL_ATTRIBUTES (decl)); 1840 } 1841 1842 t = build_decl (DECL_SOURCE_LOCATION (decl), 1843 RESULT_DECL, NULL_TREE, void_type_node); 1844 DECL_ARTIFICIAL (t) = 1; 1845 DECL_IGNORED_P (t) = 1; 1846 DECL_CONTEXT (t) = decl; 1847 DECL_RESULT (decl) = t; 1848 1849 tree data_name = get_identifier (".omp_data_i"); 1850 t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name, 1851 ptr_type_node); 1852 DECL_ARTIFICIAL (t) = 1; 1853 DECL_NAMELESS (t) = 1; 1854 DECL_ARG_TYPE (t) = ptr_type_node; 1855 DECL_CONTEXT (t) = current_function_decl; 1856 TREE_USED (t) = 1; 1857 TREE_READONLY (t) = 1; 1858 DECL_ARGUMENTS (decl) = t; 1859 if (!task_copy) 1860 ctx->receiver_decl = t; 1861 else 1862 { 1863 t = build_decl (DECL_SOURCE_LOCATION (decl), 1864 PARM_DECL, get_identifier (".omp_data_o"), 1865 ptr_type_node); 1866 DECL_ARTIFICIAL (t) = 1; 1867 DECL_NAMELESS (t) = 1; 1868 DECL_ARG_TYPE (t) = ptr_type_node; 1869 DECL_CONTEXT (t) = current_function_decl; 1870 TREE_USED (t) = 1; 1871 TREE_ADDRESSABLE (t) = 1; 1872 DECL_CHAIN (t) = DECL_ARGUMENTS (decl); 1873 DECL_ARGUMENTS (decl) = t; 1874 } 1875 1876 /* Allocate memory for the function structure. The call to 1877 allocate_struct_function clobbers CFUN, so we need to restore 1878 it afterward. */ 1879 push_struct_function (decl); 1880 cfun->function_end_locus = gimple_location (ctx->stmt); 1881 init_tree_ssa (cfun); 1882 pop_cfun (); 1883 } 1884 1885 /* Callback for walk_gimple_seq. Check if combined parallel 1886 contains gimple_omp_for_combined_into_p OMP_FOR. */ 1887 1888 tree 1889 omp_find_combined_for (gimple_stmt_iterator *gsi_p, 1890 bool *handled_ops_p, 1891 struct walk_stmt_info *wi) 1892 { 1893 gimple *stmt = gsi_stmt (*gsi_p); 1894 1895 *handled_ops_p = true; 1896 switch (gimple_code (stmt)) 1897 { 1898 WALK_SUBSTMTS; 1899 1900 case GIMPLE_OMP_FOR: 1901 if (gimple_omp_for_combined_into_p (stmt) 1902 && gimple_omp_for_kind (stmt) 1903 == *(const enum gf_mask *) (wi->info)) 1904 { 1905 wi->info = stmt; 1906 return integer_zero_node; 1907 } 1908 break; 1909 default: 1910 break; 1911 } 1912 return NULL; 1913 } 1914 1915 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */ 1916 1917 static void 1918 add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt, 1919 omp_context *outer_ctx) 1920 { 1921 struct walk_stmt_info wi; 1922 1923 memset (&wi, 0, sizeof (wi)); 1924 wi.val_only = true; 1925 wi.info = (void *) &msk; 1926 walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi); 1927 if (wi.info != (void *) &msk) 1928 { 1929 gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info); 1930 struct omp_for_data fd; 1931 omp_extract_for_data (for_stmt, &fd, NULL); 1932 /* We need two temporaries with fd.loop.v type (istart/iend) 1933 and then (fd.collapse - 1) temporaries with the same 1934 type for count2 ... countN-1 vars if not constant. */ 1935 size_t count = 2, i; 1936 tree type = fd.iter_type; 1937 if (fd.collapse > 1 1938 && TREE_CODE (fd.loop.n2) != INTEGER_CST) 1939 { 1940 count += fd.collapse - 1; 1941 /* If there are lastprivate clauses on the inner 1942 GIMPLE_OMP_FOR, add one more temporaries for the total number 1943 of iterations (product of count1 ... countN-1). 
*/ 1944 if (omp_find_clause (gimple_omp_for_clauses (for_stmt), 1945 OMP_CLAUSE_LASTPRIVATE)) 1946 count++; 1947 else if (msk == GF_OMP_FOR_KIND_FOR 1948 && omp_find_clause (gimple_omp_parallel_clauses (stmt), 1949 OMP_CLAUSE_LASTPRIVATE)) 1950 count++; 1951 } 1952 for (i = 0; i < count; i++) 1953 { 1954 tree temp = create_tmp_var (type); 1955 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_); 1956 insert_decl_map (&outer_ctx->cb, temp, temp); 1957 OMP_CLAUSE_DECL (c) = temp; 1958 OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt); 1959 gimple_omp_taskreg_set_clauses (stmt, c); 1960 } 1961 } 1962 if (msk == GF_OMP_FOR_KIND_TASKLOOP 1963 && omp_find_clause (gimple_omp_task_clauses (stmt), 1964 OMP_CLAUSE_REDUCTION)) 1965 { 1966 tree type = build_pointer_type (pointer_sized_int_node); 1967 tree temp = create_tmp_var (type); 1968 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_); 1969 insert_decl_map (&outer_ctx->cb, temp, temp); 1970 OMP_CLAUSE_DECL (c) = temp; 1971 OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt); 1972 gimple_omp_task_set_clauses (stmt, c); 1973 } 1974 } 1975 1976 /* Scan an OpenMP parallel directive. */ 1977 1978 static void 1979 scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx) 1980 { 1981 omp_context *ctx; 1982 tree name; 1983 gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi)); 1984 1985 /* Ignore parallel directives with empty bodies, unless there 1986 are copyin clauses. */ 1987 if (optimize > 0 1988 && empty_body_p (gimple_omp_body (stmt)) 1989 && omp_find_clause (gimple_omp_parallel_clauses (stmt), 1990 OMP_CLAUSE_COPYIN) == NULL) 1991 { 1992 gsi_replace (gsi, gimple_build_nop (), false); 1993 return; 1994 } 1995 1996 if (gimple_omp_parallel_combined_p (stmt)) 1997 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx); 1998 for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt), 1999 OMP_CLAUSE_REDUCTION); 2000 c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION)) 2001 if (OMP_CLAUSE_REDUCTION_TASK (c)) 2002 { 2003 tree type = build_pointer_type (pointer_sized_int_node); 2004 tree temp = create_tmp_var (type); 2005 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_); 2006 if (outer_ctx) 2007 insert_decl_map (&outer_ctx->cb, temp, temp); 2008 OMP_CLAUSE_DECL (c) = temp; 2009 OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt); 2010 gimple_omp_parallel_set_clauses (stmt, c); 2011 break; 2012 } 2013 else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE) 2014 break; 2015 2016 ctx = new_omp_context (stmt, outer_ctx); 2017 taskreg_contexts.safe_push (ctx); 2018 if (taskreg_nesting_level > 1) 2019 ctx->is_nested = true; 2020 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0); 2021 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE); 2022 name = create_tmp_var_name (".omp_data_s"); 2023 name = build_decl (gimple_location (stmt), 2024 TYPE_DECL, name, ctx->record_type); 2025 DECL_ARTIFICIAL (name) = 1; 2026 DECL_NAMELESS (name) = 1; 2027 TYPE_NAME (ctx->record_type) = name; 2028 TYPE_ARTIFICIAL (ctx->record_type) = 1; 2029 if (!gimple_omp_parallel_grid_phony (stmt)) 2030 { 2031 create_omp_child_function (ctx, false); 2032 gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn); 2033 } 2034 2035 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx); 2036 scan_omp (gimple_omp_body_ptr (stmt), ctx); 2037 2038 if (TYPE_FIELDS (ctx->record_type) == NULL) 2039 ctx->record_type = ctx->receiver_decl = NULL; 2040 } 2041 
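/* For instance (illustrative): with optimization enabled

     #pragma omp parallel
     ;

   is deleted outright by scan_omp_parallel above, whereas a parallel
   with a copyin clause is kept even if its body is empty, because the
   copyin data movement still has to happen.  */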
/* Scan an OpenMP task directive.  */

static void
scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name, t;
  gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));

  /* Ignore task directives with empty bodies, unless they have a depend
     clause.  */
  if (optimize > 0
      && gimple_omp_body (stmt)
      && empty_body_p (gimple_omp_body (stmt))
      && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_task_taskloop_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);

  if (gimple_omp_task_taskwait_p (stmt))
    {
      scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
      return;
    }

  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);

  if (ctx->srecord_type)
    {
      name = create_tmp_var_name (".omp_data_a");
      name = build_decl (gimple_location (stmt),
			 TYPE_DECL, name, ctx->srecord_type);
      DECL_ARTIFICIAL (name) = 1;
      DECL_NAMELESS (name) = 1;
      TYPE_NAME (ctx->srecord_type) = name;
      TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
      create_omp_child_function (ctx, true);
    }

  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    {
      ctx->record_type = ctx->receiver_decl = NULL;
      t = build_int_cst (long_integer_type_node, 0);
      gimple_omp_task_set_arg_size (stmt, t);
      t = build_int_cst (long_integer_type_node, 1);
      gimple_omp_task_set_arg_align (stmt, t);
    }
}

/* Helper function for finish_taskreg_scan, called through walk_tree.
   If maybe_lookup_decl_in_outer_ctx returns a different tree for some
   variable, replace it in the expression.  */

static tree
finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
{
  if (VAR_P (*tp))
    {
      omp_context *ctx = (omp_context *) data;
      tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
      if (t != *tp)
	{
	  if (DECL_HAS_VALUE_EXPR_P (t))
	    t = unshare_expr (DECL_VALUE_EXPR (t));
	  *tp = t;
	}
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* If any decls have been made addressable during scan_omp,
   adjust their fields if needed, and layout record types
   of parallel/task constructs.
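   For instance (illustrative): a shared "int x" is initially given a
   by-value "int" field; if scanning the construct body later takes
   x's address, use_pointer_for_field starts answering differently and
   the field has to be retyped to "int *" below before the record type
   is laid out.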
*/

static void
finish_taskreg_scan (omp_context *ctx)
{
  if (ctx->record_type == NULL_TREE)
    return;

  /* If any task_shared_vars were needed, check for all
     OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
     statements whether use_pointer_for_field has changed
     because of that.  If it did, update the field types now.  */
  if (task_shared_vars)
    {
      tree c;

      for (c = gimple_omp_taskreg_clauses (ctx->stmt);
	   c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
	    && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	  {
	    tree decl = OMP_CLAUSE_DECL (c);

	    /* Global variables don't need to be copied,
	       the receiver side will use them directly.  */
	    if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	      continue;
	    if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
		|| !use_pointer_for_field (decl, ctx))
	      continue;
	    tree field = lookup_field (decl, ctx);
	    if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
		&& TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
	      continue;
	    TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
	    TREE_THIS_VOLATILE (field) = 0;
	    DECL_USER_ALIGN (field) = 0;
	    SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
	    if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
	      SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
	    if (ctx->srecord_type)
	      {
		tree sfield = lookup_sfield (decl, ctx);
		TREE_TYPE (sfield) = TREE_TYPE (field);
		TREE_THIS_VOLATILE (sfield) = 0;
		DECL_USER_ALIGN (sfield) = 0;
		SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
		if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
		  SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
	      }
	  }
    }

  if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
    {
      tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
      tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
      if (c)
	{
	  /* Move the _reductemp_ clause first.  GOMP_parallel_reductions
	     expects to find it at the start of data.  */
	  tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
	  tree *p = &TYPE_FIELDS (ctx->record_type);
	  while (*p)
	    if (*p == f)
	      {
		*p = DECL_CHAIN (*p);
		break;
	      }
	    else
	      p = &DECL_CHAIN (*p);
	  DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
	  TYPE_FIELDS (ctx->record_type) = f;
	}
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
    }
  else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
    {
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
    }
  else
    {
      location_t loc = gimple_location (ctx->stmt);
      tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
      /* Move VLA fields to the end.  */
      p = &TYPE_FIELDS (ctx->record_type);
      while (*p)
	if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
	    || !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
	  {
	    *q = *p;
	    *p = TREE_CHAIN (*p);
	    TREE_CHAIN (*q) = NULL_TREE;
	    q = &TREE_CHAIN (*q);
	  }
	else
	  p = &DECL_CHAIN (*p);
      *p = vla_fields;
      if (gimple_omp_task_taskloop_p (ctx->stmt))
	{
	  /* Move the fields corresponding to the first and second
	     _looptemp_ clauses first.  These are filled by GOMP_taskloop
	     and thus need to be in specific positions.
*/
	  tree clauses = gimple_omp_task_clauses (ctx->stmt);
	  tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
	  tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
				     OMP_CLAUSE__LOOPTEMP_);
	  tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
	  tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
	  tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
	  tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
	  p = &TYPE_FIELDS (ctx->record_type);
	  while (*p)
	    if (*p == f1 || *p == f2 || *p == f3)
	      *p = DECL_CHAIN (*p);
	    else
	      p = &DECL_CHAIN (*p);
	  DECL_CHAIN (f1) = f2;
	  if (c3)
	    {
	      DECL_CHAIN (f2) = f3;
	      DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
	    }
	  else
	    DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
	  TYPE_FIELDS (ctx->record_type) = f1;
	  if (ctx->srecord_type)
	    {
	      f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
	      f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
	      if (c3)
		f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
	      p = &TYPE_FIELDS (ctx->srecord_type);
	      while (*p)
		if (*p == f1 || *p == f2 || *p == f3)
		  *p = DECL_CHAIN (*p);
		else
		  p = &DECL_CHAIN (*p);
	      DECL_CHAIN (f1) = f2;
	      DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
	      if (c3)
		{
		  DECL_CHAIN (f2) = f3;
		  DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
		}
	      else
		DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
	      TYPE_FIELDS (ctx->srecord_type) = f1;
	    }
	}
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
      if (ctx->srecord_type)
	layout_type (ctx->srecord_type);
      tree t = fold_convert_loc (loc, long_integer_type_node,
				 TYPE_SIZE_UNIT (ctx->record_type));
      if (TREE_CODE (t) != INTEGER_CST)
	{
	  t = unshare_expr (t);
	  walk_tree (&t, finish_taskreg_remap, ctx, NULL);
	}
      gimple_omp_task_set_arg_size (ctx->stmt, t);
      t = build_int_cst (long_integer_type_node,
			 TYPE_ALIGN_UNIT (ctx->record_type));
      gimple_omp_task_set_arg_align (ctx->stmt, t);
    }
}

/* Find the enclosing offload context.  */

static omp_context *
enclosing_target_ctx (omp_context *ctx)
{
  for (; ctx; ctx = ctx->outer)
    if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
      break;

  return ctx;
}

/* Return true if CTX is part of an OpenACC kernels region.  */

static bool
ctx_in_oacc_kernels_region (omp_context *ctx)
{
  for (; ctx != NULL; ctx = ctx->outer)
    {
      gimple *stmt = ctx->stmt;
      if (gimple_code (stmt) == GIMPLE_OMP_TARGET
	  && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
	return true;
    }

  return false;
}

/* Check the parallelism clauses inside a kernels region.
   Until kernels handling moves to use the same loop indirection
   scheme as parallel, we need to do this checking early.
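   For example (illustrative of the diagnostics below): inside a
   kernels region,

     #pragma acc loop gang
     for (...)
       #pragma acc loop gang
       for (...)

   triggers "inner loop uses same OpenACC parallelism as containing
   loop", and combining "seq" with "gang" (or with "auto") on a single
   loop is likewise rejected.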
*/ 2341 2342 static unsigned 2343 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx) 2344 { 2345 bool checking = true; 2346 unsigned outer_mask = 0; 2347 unsigned this_mask = 0; 2348 bool has_seq = false, has_auto = false; 2349 2350 if (ctx->outer) 2351 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer); 2352 if (!stmt) 2353 { 2354 checking = false; 2355 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR) 2356 return outer_mask; 2357 stmt = as_a <gomp_for *> (ctx->stmt); 2358 } 2359 2360 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c)) 2361 { 2362 switch (OMP_CLAUSE_CODE (c)) 2363 { 2364 case OMP_CLAUSE_GANG: 2365 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG); 2366 break; 2367 case OMP_CLAUSE_WORKER: 2368 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER); 2369 break; 2370 case OMP_CLAUSE_VECTOR: 2371 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR); 2372 break; 2373 case OMP_CLAUSE_SEQ: 2374 has_seq = true; 2375 break; 2376 case OMP_CLAUSE_AUTO: 2377 has_auto = true; 2378 break; 2379 default: 2380 break; 2381 } 2382 } 2383 2384 if (checking) 2385 { 2386 if (has_seq && (this_mask || has_auto)) 2387 error_at (gimple_location (stmt), "%<seq%> overrides other" 2388 " OpenACC loop specifiers"); 2389 else if (has_auto && this_mask) 2390 error_at (gimple_location (stmt), "%<auto%> conflicts with other" 2391 " OpenACC loop specifiers"); 2392 2393 if (this_mask & outer_mask) 2394 error_at (gimple_location (stmt), "inner loop uses same" 2395 " OpenACC parallelism as containing loop"); 2396 } 2397 2398 return outer_mask | this_mask; 2399 } 2400 2401 /* Scan a GIMPLE_OMP_FOR. */ 2402 2403 static omp_context * 2404 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx) 2405 { 2406 omp_context *ctx; 2407 size_t i; 2408 tree clauses = gimple_omp_for_clauses (stmt); 2409 2410 ctx = new_omp_context (stmt, outer_ctx); 2411 2412 if (is_gimple_omp_oacc (stmt)) 2413 { 2414 omp_context *tgt = enclosing_target_ctx (outer_ctx); 2415 2416 if (!(tgt && is_oacc_kernels (tgt))) 2417 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c)) 2418 { 2419 tree c_op0; 2420 switch (OMP_CLAUSE_CODE (c)) 2421 { 2422 case OMP_CLAUSE_GANG: 2423 c_op0 = OMP_CLAUSE_GANG_EXPR (c); 2424 break; 2425 2426 case OMP_CLAUSE_WORKER: 2427 c_op0 = OMP_CLAUSE_WORKER_EXPR (c); 2428 break; 2429 2430 case OMP_CLAUSE_VECTOR: 2431 c_op0 = OMP_CLAUSE_VECTOR_EXPR (c); 2432 break; 2433 2434 default: 2435 continue; 2436 } 2437 2438 if (c_op0) 2439 { 2440 error_at (OMP_CLAUSE_LOCATION (c), 2441 "argument not permitted on %qs clause", 2442 omp_clause_code_name[OMP_CLAUSE_CODE (c)]); 2443 if (tgt) 2444 inform (gimple_location (tgt->stmt), 2445 "enclosing parent compute construct"); 2446 else if (oacc_get_fn_attrib (current_function_decl)) 2447 inform (DECL_SOURCE_LOCATION (current_function_decl), 2448 "enclosing routine"); 2449 else 2450 gcc_unreachable (); 2451 } 2452 } 2453 2454 if (tgt && is_oacc_kernels (tgt)) 2455 check_oacc_kernel_gwv (stmt, ctx); 2456 2457 /* Collect all variables named in reductions on this loop. Ensure 2458 that, if this loop has a reduction on some variable v, and there is 2459 a reduction on v somewhere in an outer context, then there is a 2460 reduction on v on all intervening loops as well. 
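   For example (illustrative of the warnings below):

     #pragma acc loop reduction(+:v)
     for (...)
       #pragma acc loop
       for (...)
	 #pragma acc loop reduction(+:v)
	 for (...)

   warns that the middle loop needs a reduction clause for v; a
   nested reduction(*:v) under an outer reduction(+:v) is reported as
   conflicting reduction operations.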
*/ 2461 tree local_reduction_clauses = NULL; 2462 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c)) 2463 { 2464 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION) 2465 local_reduction_clauses 2466 = tree_cons (NULL, c, local_reduction_clauses); 2467 } 2468 if (ctx->outer_reduction_clauses == NULL && ctx->outer != NULL) 2469 ctx->outer_reduction_clauses 2470 = chainon (unshare_expr (ctx->outer->local_reduction_clauses), 2471 ctx->outer->outer_reduction_clauses); 2472 tree outer_reduction_clauses = ctx->outer_reduction_clauses; 2473 tree local_iter = local_reduction_clauses; 2474 for (; local_iter; local_iter = TREE_CHAIN (local_iter)) 2475 { 2476 tree local_clause = TREE_VALUE (local_iter); 2477 tree local_var = OMP_CLAUSE_DECL (local_clause); 2478 tree_code local_op = OMP_CLAUSE_REDUCTION_CODE (local_clause); 2479 bool have_outer_reduction = false; 2480 tree ctx_iter = outer_reduction_clauses; 2481 for (; ctx_iter; ctx_iter = TREE_CHAIN (ctx_iter)) 2482 { 2483 tree outer_clause = TREE_VALUE (ctx_iter); 2484 tree outer_var = OMP_CLAUSE_DECL (outer_clause); 2485 tree_code outer_op = OMP_CLAUSE_REDUCTION_CODE (outer_clause); 2486 if (outer_var == local_var && outer_op != local_op) 2487 { 2488 warning_at (OMP_CLAUSE_LOCATION (local_clause), 0, 2489 "conflicting reduction operations for %qE", 2490 local_var); 2491 inform (OMP_CLAUSE_LOCATION (outer_clause), 2492 "location of the previous reduction for %qE", 2493 outer_var); 2494 } 2495 if (outer_var == local_var) 2496 { 2497 have_outer_reduction = true; 2498 break; 2499 } 2500 } 2501 if (have_outer_reduction) 2502 { 2503 /* There is a reduction on outer_var both on this loop and on 2504 some enclosing loop. Walk up the context tree until such a 2505 loop with a reduction on outer_var is found, and complain 2506 about all intervening loops that do not have such a 2507 reduction. */ 2508 struct omp_context *curr_loop = ctx->outer; 2509 bool found = false; 2510 while (curr_loop != NULL) 2511 { 2512 tree curr_iter = curr_loop->local_reduction_clauses; 2513 for (; curr_iter; curr_iter = TREE_CHAIN (curr_iter)) 2514 { 2515 tree curr_clause = TREE_VALUE (curr_iter); 2516 tree curr_var = OMP_CLAUSE_DECL (curr_clause); 2517 if (curr_var == local_var) 2518 { 2519 found = true; 2520 break; 2521 } 2522 } 2523 if (!found) 2524 warning_at (gimple_location (curr_loop->stmt), 0, 2525 "nested loop in reduction needs " 2526 "reduction clause for %qE", 2527 local_var); 2528 else 2529 break; 2530 curr_loop = curr_loop->outer; 2531 } 2532 } 2533 } 2534 ctx->local_reduction_clauses = local_reduction_clauses; 2535 ctx->outer_reduction_clauses 2536 = chainon (unshare_expr (ctx->local_reduction_clauses), 2537 ctx->outer_reduction_clauses); 2538 2539 if (tgt && is_oacc_kernels (tgt)) 2540 { 2541 /* Strip out reductions, as they are not handled yet. 
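     That is (sketch): within a kernels region a loop's
     "reduction(+:sum)" clause is simply unlinked from the clause
     chain below, as if it had not been written.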
*/ 2542 tree *prev_ptr = &clauses; 2543 2544 while (tree probe = *prev_ptr) 2545 { 2546 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe); 2547 2548 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION) 2549 *prev_ptr = *next_ptr; 2550 else 2551 prev_ptr = next_ptr; 2552 } 2553 2554 gimple_omp_for_set_clauses (stmt, clauses); 2555 } 2556 } 2557 2558 scan_sharing_clauses (clauses, ctx); 2559 2560 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx); 2561 for (i = 0; i < gimple_omp_for_collapse (stmt); i++) 2562 { 2563 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx); 2564 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx); 2565 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx); 2566 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx); 2567 } 2568 scan_omp (gimple_omp_body_ptr (stmt), ctx); 2569 return ctx; 2570 } 2571 2572 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */ 2573 2574 static void 2575 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt, 2576 omp_context *outer_ctx) 2577 { 2578 gbind *bind = gimple_build_bind (NULL, NULL, NULL); 2579 gsi_replace (gsi, bind, false); 2580 gimple_seq seq = NULL; 2581 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0); 2582 tree cond = create_tmp_var_raw (integer_type_node); 2583 DECL_CONTEXT (cond) = current_function_decl; 2584 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1; 2585 gimple_bind_set_vars (bind, cond); 2586 gimple_call_set_lhs (g, cond); 2587 gimple_seq_add_stmt (&seq, g); 2588 tree lab1 = create_artificial_label (UNKNOWN_LOCATION); 2589 tree lab2 = create_artificial_label (UNKNOWN_LOCATION); 2590 tree lab3 = create_artificial_label (UNKNOWN_LOCATION); 2591 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2); 2592 gimple_seq_add_stmt (&seq, g); 2593 g = gimple_build_label (lab1); 2594 gimple_seq_add_stmt (&seq, g); 2595 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt); 2596 gomp_for *new_stmt = as_a <gomp_for *> (new_seq); 2597 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_); 2598 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt); 2599 gimple_omp_for_set_clauses (new_stmt, clause); 2600 gimple_seq_add_stmt (&seq, new_stmt); 2601 g = gimple_build_goto (lab3); 2602 gimple_seq_add_stmt (&seq, g); 2603 g = gimple_build_label (lab2); 2604 gimple_seq_add_stmt (&seq, g); 2605 gimple_seq_add_stmt (&seq, stmt); 2606 g = gimple_build_label (lab3); 2607 gimple_seq_add_stmt (&seq, g); 2608 gimple_bind_set_body (bind, seq); 2609 update_stmt (bind); 2610 scan_omp_for (new_stmt, outer_ctx); 2611 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt; 2612 } 2613 2614 static tree omp_find_scan (gimple_stmt_iterator *, bool *, 2615 struct walk_stmt_info *); 2616 static omp_context *maybe_lookup_ctx (gimple *); 2617 2618 /* Duplicate #pragma omp simd, one for the scan input phase loop and one 2619 for scan phase loop. */ 2620 2621 static void 2622 scan_omp_simd_scan (gimple_stmt_iterator *gsi, gomp_for *stmt, 2623 omp_context *outer_ctx) 2624 { 2625 /* The only change between inclusive and exclusive scan will be 2626 within the first simd loop, so just use inclusive in the 2627 worksharing loop. 
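     E.g. (illustrative):

       #pragma omp for simd reduction(inscan, +:s)
       for (i = 0; i < n; i++)
	 {
	   s += a[i];
	   #pragma omp scan inclusive(s)
	   b[i] = s;
	 }

     duplicates the simd loop into an input phase loop and a scan
     phase loop, while the enclosing worksharing loop is given an
     inclusive scan context either way.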
*/ 2628 outer_ctx->scan_inclusive = true; 2629 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_INCLUSIVE); 2630 OMP_CLAUSE_DECL (c) = integer_zero_node; 2631 2632 gomp_scan *input_stmt = gimple_build_omp_scan (NULL, NULL_TREE); 2633 gomp_scan *scan_stmt = gimple_build_omp_scan (NULL, c); 2634 gsi_replace (gsi, input_stmt, false); 2635 gimple_seq input_body = NULL; 2636 gimple_seq_add_stmt (&input_body, stmt); 2637 gsi_insert_after (gsi, scan_stmt, GSI_NEW_STMT); 2638 2639 gimple_stmt_iterator input1_gsi = gsi_none (); 2640 struct walk_stmt_info wi; 2641 memset (&wi, 0, sizeof (wi)); 2642 wi.val_only = true; 2643 wi.info = (void *) &input1_gsi; 2644 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), omp_find_scan, NULL, &wi); 2645 gcc_assert (!gsi_end_p (input1_gsi)); 2646 2647 gimple *input_stmt1 = gsi_stmt (input1_gsi); 2648 gsi_next (&input1_gsi); 2649 gimple *scan_stmt1 = gsi_stmt (input1_gsi); 2650 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN); 2651 c = gimple_omp_scan_clauses (as_a <gomp_scan *> (scan_stmt1)); 2652 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE) 2653 std::swap (input_stmt1, scan_stmt1); 2654 2655 gimple_seq input_body1 = gimple_omp_body (input_stmt1); 2656 gimple_omp_set_body (input_stmt1, NULL); 2657 2658 gimple_seq scan_body = copy_gimple_seq_and_replace_locals (stmt); 2659 gomp_for *new_stmt = as_a <gomp_for *> (scan_body); 2660 2661 gimple_omp_set_body (input_stmt1, input_body1); 2662 gimple_omp_set_body (scan_stmt1, NULL); 2663 2664 gimple_stmt_iterator input2_gsi = gsi_none (); 2665 memset (&wi, 0, sizeof (wi)); 2666 wi.val_only = true; 2667 wi.info = (void *) &input2_gsi; 2668 walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt), omp_find_scan, 2669 NULL, &wi); 2670 gcc_assert (!gsi_end_p (input2_gsi)); 2671 2672 gimple *input_stmt2 = gsi_stmt (input2_gsi); 2673 gsi_next (&input2_gsi); 2674 gimple *scan_stmt2 = gsi_stmt (input2_gsi); 2675 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN); 2676 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE) 2677 std::swap (input_stmt2, scan_stmt2); 2678 2679 gimple_omp_set_body (input_stmt2, NULL); 2680 2681 gimple_omp_set_body (input_stmt, input_body); 2682 gimple_omp_set_body (scan_stmt, scan_body); 2683 2684 omp_context *ctx = new_omp_context (input_stmt, outer_ctx); 2685 scan_omp (gimple_omp_body_ptr (input_stmt), ctx); 2686 2687 ctx = new_omp_context (scan_stmt, outer_ctx); 2688 scan_omp (gimple_omp_body_ptr (scan_stmt), ctx); 2689 2690 maybe_lookup_ctx (new_stmt)->for_simd_scan_phase = true; 2691 } 2692 2693 /* Scan an OpenMP sections directive. */ 2694 2695 static void 2696 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx) 2697 { 2698 omp_context *ctx; 2699 2700 ctx = new_omp_context (stmt, outer_ctx); 2701 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx); 2702 scan_omp (gimple_omp_body_ptr (stmt), ctx); 2703 } 2704 2705 /* Scan an OpenMP single directive. 
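   For a directive such as (illustrative)

     #pragma omp single copyprivate(x)

   the ".omp_copy_s" record built below receives a field for x, which
   is how the value is broadcast from the thread that executed the
   region to the others.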
*/ 2706 2707 static void 2708 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx) 2709 { 2710 omp_context *ctx; 2711 tree name; 2712 2713 ctx = new_omp_context (stmt, outer_ctx); 2714 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0); 2715 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE); 2716 name = create_tmp_var_name (".omp_copy_s"); 2717 name = build_decl (gimple_location (stmt), 2718 TYPE_DECL, name, ctx->record_type); 2719 TYPE_NAME (ctx->record_type) = name; 2720 2721 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx); 2722 scan_omp (gimple_omp_body_ptr (stmt), ctx); 2723 2724 if (TYPE_FIELDS (ctx->record_type) == NULL) 2725 ctx->record_type = NULL; 2726 else 2727 layout_type (ctx->record_type); 2728 } 2729 2730 /* Scan a GIMPLE_OMP_TARGET. */ 2731 2732 static void 2733 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx) 2734 { 2735 omp_context *ctx; 2736 tree name; 2737 bool offloaded = is_gimple_omp_offloaded (stmt); 2738 tree clauses = gimple_omp_target_clauses (stmt); 2739 2740 ctx = new_omp_context (stmt, outer_ctx); 2741 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0); 2742 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE); 2743 name = create_tmp_var_name (".omp_data_t"); 2744 name = build_decl (gimple_location (stmt), 2745 TYPE_DECL, name, ctx->record_type); 2746 DECL_ARTIFICIAL (name) = 1; 2747 DECL_NAMELESS (name) = 1; 2748 TYPE_NAME (ctx->record_type) = name; 2749 TYPE_ARTIFICIAL (ctx->record_type) = 1; 2750 2751 if (offloaded) 2752 { 2753 create_omp_child_function (ctx, false); 2754 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn); 2755 } 2756 2757 scan_sharing_clauses (clauses, ctx); 2758 scan_omp (gimple_omp_body_ptr (stmt), ctx); 2759 2760 if (TYPE_FIELDS (ctx->record_type) == NULL) 2761 ctx->record_type = ctx->receiver_decl = NULL; 2762 else 2763 { 2764 TYPE_FIELDS (ctx->record_type) 2765 = nreverse (TYPE_FIELDS (ctx->record_type)); 2766 if (flag_checking) 2767 { 2768 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type)); 2769 for (tree field = TYPE_FIELDS (ctx->record_type); 2770 field; 2771 field = DECL_CHAIN (field)) 2772 gcc_assert (DECL_ALIGN (field) == align); 2773 } 2774 layout_type (ctx->record_type); 2775 if (offloaded) 2776 fixup_child_record_type (ctx); 2777 } 2778 } 2779 2780 /* Scan an OpenMP teams directive. 
*/ 2781 2782 static void 2783 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx) 2784 { 2785 omp_context *ctx = new_omp_context (stmt, outer_ctx); 2786 2787 if (!gimple_omp_teams_host (stmt)) 2788 { 2789 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx); 2790 scan_omp (gimple_omp_body_ptr (stmt), ctx); 2791 return; 2792 } 2793 taskreg_contexts.safe_push (ctx); 2794 gcc_assert (taskreg_nesting_level == 1); 2795 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0); 2796 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE); 2797 tree name = create_tmp_var_name (".omp_data_s"); 2798 name = build_decl (gimple_location (stmt), 2799 TYPE_DECL, name, ctx->record_type); 2800 DECL_ARTIFICIAL (name) = 1; 2801 DECL_NAMELESS (name) = 1; 2802 TYPE_NAME (ctx->record_type) = name; 2803 TYPE_ARTIFICIAL (ctx->record_type) = 1; 2804 create_omp_child_function (ctx, false); 2805 gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn); 2806 2807 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx); 2808 scan_omp (gimple_omp_body_ptr (stmt), ctx); 2809 2810 if (TYPE_FIELDS (ctx->record_type) == NULL) 2811 ctx->record_type = ctx->receiver_decl = NULL; 2812 } 2813 2814 /* Check nesting restrictions. */ 2815 static bool 2816 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx) 2817 { 2818 tree c; 2819 2820 if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY) 2821 /* GRID_BODY is an artificial construct, nesting rules will be checked in 2822 the original copy of its contents. */ 2823 return true; 2824 2825 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin) 2826 inside an OpenACC CTX. */ 2827 if (!(is_gimple_omp (stmt) 2828 && is_gimple_omp_oacc (stmt)) 2829 /* Except for atomic codes that we share with OpenMP. 
*/ 2830 && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD 2831 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE)) 2832 { 2833 if (oacc_get_fn_attrib (cfun->decl) != NULL) 2834 { 2835 error_at (gimple_location (stmt), 2836 "non-OpenACC construct inside of OpenACC routine"); 2837 return false; 2838 } 2839 else 2840 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer) 2841 if (is_gimple_omp (octx->stmt) 2842 && is_gimple_omp_oacc (octx->stmt)) 2843 { 2844 error_at (gimple_location (stmt), 2845 "non-OpenACC construct inside of OpenACC region"); 2846 return false; 2847 } 2848 } 2849 2850 if (ctx != NULL) 2851 { 2852 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN 2853 && ctx->outer 2854 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR) 2855 ctx = ctx->outer; 2856 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR 2857 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD 2858 && !ctx->loop_p) 2859 { 2860 c = NULL_TREE; 2861 if (ctx->order_concurrent 2862 && (gimple_code (stmt) == GIMPLE_OMP_ORDERED 2863 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD 2864 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE)) 2865 { 2866 error_at (gimple_location (stmt), 2867 "OpenMP constructs other than %<parallel%>, %<loop%>" 2868 " or %<simd%> may not be nested inside a region with" 2869 " the %<order(concurrent)%> clause"); 2870 return false; 2871 } 2872 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED) 2873 { 2874 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)); 2875 if (omp_find_clause (c, OMP_CLAUSE_SIMD)) 2876 { 2877 if (omp_find_clause (c, OMP_CLAUSE_THREADS) 2878 && (ctx->outer == NULL 2879 || !gimple_omp_for_combined_into_p (ctx->stmt) 2880 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR 2881 || (gimple_omp_for_kind (ctx->outer->stmt) 2882 != GF_OMP_FOR_KIND_FOR) 2883 || !gimple_omp_for_combined_p (ctx->outer->stmt))) 2884 { 2885 error_at (gimple_location (stmt), 2886 "%<ordered simd threads%> must be closely " 2887 "nested inside of %<for simd%> region"); 2888 return false; 2889 } 2890 return true; 2891 } 2892 } 2893 else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD 2894 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE 2895 || gimple_code (stmt) == GIMPLE_OMP_SCAN) 2896 return true; 2897 else if (gimple_code (stmt) == GIMPLE_OMP_FOR 2898 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD) 2899 return true; 2900 error_at (gimple_location (stmt), 2901 "OpenMP constructs other than " 2902 "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may " 2903 "not be nested inside %<simd%> region"); 2904 return false; 2905 } 2906 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS) 2907 { 2908 if ((gimple_code (stmt) != GIMPLE_OMP_FOR 2909 || (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE 2910 && gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP 2911 && omp_find_clause (gimple_omp_for_clauses (stmt), 2912 OMP_CLAUSE_BIND) == NULL_TREE)) 2913 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL) 2914 { 2915 error_at (gimple_location (stmt), 2916 "only %<distribute%>, %<parallel%> or %<loop%> " 2917 "regions are allowed to be strictly nested inside " 2918 "%<teams%> region"); 2919 return false; 2920 } 2921 } 2922 else if (ctx->order_concurrent 2923 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL 2924 && (gimple_code (stmt) != GIMPLE_OMP_FOR 2925 || gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_SIMD) 2926 && gimple_code (stmt) != GIMPLE_OMP_SCAN) 2927 { 2928 if (ctx->loop_p) 2929 error_at (gimple_location (stmt), 2930 "OpenMP constructs other than %<parallel%>, %<loop%> 
or " 2931 "%<simd%> may not be nested inside a %<loop%> region"); 2932 else 2933 error_at (gimple_location (stmt), 2934 "OpenMP constructs other than %<parallel%>, %<loop%> or " 2935 "%<simd%> may not be nested inside a region with " 2936 "the %<order(concurrent)%> clause"); 2937 return false; 2938 } 2939 } 2940 switch (gimple_code (stmt)) 2941 { 2942 case GIMPLE_OMP_FOR: 2943 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD) 2944 return true; 2945 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE) 2946 { 2947 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS) 2948 { 2949 error_at (gimple_location (stmt), 2950 "%<distribute%> region must be strictly nested " 2951 "inside %<teams%> construct"); 2952 return false; 2953 } 2954 return true; 2955 } 2956 /* We split taskloop into task and nested taskloop in it. */ 2957 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP) 2958 return true; 2959 /* For now, hope this will change and loop bind(parallel) will not 2960 be allowed in lots of contexts. */ 2961 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR 2962 && omp_find_clause (gimple_omp_for_clauses (stmt), OMP_CLAUSE_BIND)) 2963 return true; 2964 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP) 2965 { 2966 bool ok = false; 2967 2968 if (ctx) 2969 switch (gimple_code (ctx->stmt)) 2970 { 2971 case GIMPLE_OMP_FOR: 2972 ok = (gimple_omp_for_kind (ctx->stmt) 2973 == GF_OMP_FOR_KIND_OACC_LOOP); 2974 break; 2975 2976 case GIMPLE_OMP_TARGET: 2977 switch (gimple_omp_target_kind (ctx->stmt)) 2978 { 2979 case GF_OMP_TARGET_KIND_OACC_PARALLEL: 2980 case GF_OMP_TARGET_KIND_OACC_KERNELS: 2981 case GF_OMP_TARGET_KIND_OACC_SERIAL: 2982 ok = true; 2983 break; 2984 2985 default: 2986 break; 2987 } 2988 2989 default: 2990 break; 2991 } 2992 else if (oacc_get_fn_attrib (current_function_decl)) 2993 ok = true; 2994 if (!ok) 2995 { 2996 error_at (gimple_location (stmt), 2997 "OpenACC loop directive must be associated with" 2998 " an OpenACC compute region"); 2999 return false; 3000 } 3001 } 3002 /* FALLTHRU */ 3003 case GIMPLE_CALL: 3004 if (is_gimple_call (stmt) 3005 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt)) 3006 == BUILT_IN_GOMP_CANCEL 3007 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt)) 3008 == BUILT_IN_GOMP_CANCELLATION_POINT)) 3009 { 3010 const char *bad = NULL; 3011 const char *kind = NULL; 3012 const char *construct 3013 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt)) 3014 == BUILT_IN_GOMP_CANCEL) 3015 ? "cancel" 3016 : "cancellation point"; 3017 if (ctx == NULL) 3018 { 3019 error_at (gimple_location (stmt), "orphaned %qs construct", 3020 construct); 3021 return false; 3022 } 3023 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0)) 3024 ? 
tree_to_shwi (gimple_call_arg (stmt, 0)) 3025 : 0) 3026 { 3027 case 1: 3028 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL) 3029 bad = "parallel"; 3030 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt)) 3031 == BUILT_IN_GOMP_CANCEL 3032 && !integer_zerop (gimple_call_arg (stmt, 1))) 3033 ctx->cancellable = true; 3034 kind = "parallel"; 3035 break; 3036 case 2: 3037 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR 3038 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR) 3039 bad = "for"; 3040 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt)) 3041 == BUILT_IN_GOMP_CANCEL 3042 && !integer_zerop (gimple_call_arg (stmt, 1))) 3043 { 3044 ctx->cancellable = true; 3045 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt), 3046 OMP_CLAUSE_NOWAIT)) 3047 warning_at (gimple_location (stmt), 0, 3048 "%<cancel for%> inside " 3049 "%<nowait%> for construct"); 3050 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt), 3051 OMP_CLAUSE_ORDERED)) 3052 warning_at (gimple_location (stmt), 0, 3053 "%<cancel for%> inside " 3054 "%<ordered%> for construct"); 3055 } 3056 kind = "for"; 3057 break; 3058 case 4: 3059 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS 3060 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION) 3061 bad = "sections"; 3062 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt)) 3063 == BUILT_IN_GOMP_CANCEL 3064 && !integer_zerop (gimple_call_arg (stmt, 1))) 3065 { 3066 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS) 3067 { 3068 ctx->cancellable = true; 3069 if (omp_find_clause (gimple_omp_sections_clauses 3070 (ctx->stmt), 3071 OMP_CLAUSE_NOWAIT)) 3072 warning_at (gimple_location (stmt), 0, 3073 "%<cancel sections%> inside " 3074 "%<nowait%> sections construct"); 3075 } 3076 else 3077 { 3078 gcc_assert (ctx->outer 3079 && gimple_code (ctx->outer->stmt) 3080 == GIMPLE_OMP_SECTIONS); 3081 ctx->outer->cancellable = true; 3082 if (omp_find_clause (gimple_omp_sections_clauses 3083 (ctx->outer->stmt), 3084 OMP_CLAUSE_NOWAIT)) 3085 warning_at (gimple_location (stmt), 0, 3086 "%<cancel sections%> inside " 3087 "%<nowait%> sections construct"); 3088 } 3089 } 3090 kind = "sections"; 3091 break; 3092 case 8: 3093 if (!is_task_ctx (ctx) 3094 && (!is_taskloop_ctx (ctx) 3095 || ctx->outer == NULL 3096 || !is_task_ctx (ctx->outer))) 3097 bad = "task"; 3098 else 3099 { 3100 for (omp_context *octx = ctx->outer; 3101 octx; octx = octx->outer) 3102 { 3103 switch (gimple_code (octx->stmt)) 3104 { 3105 case GIMPLE_OMP_TASKGROUP: 3106 break; 3107 case GIMPLE_OMP_TARGET: 3108 if (gimple_omp_target_kind (octx->stmt) 3109 != GF_OMP_TARGET_KIND_REGION) 3110 continue; 3111 /* FALLTHRU */ 3112 case GIMPLE_OMP_PARALLEL: 3113 case GIMPLE_OMP_TEAMS: 3114 error_at (gimple_location (stmt), 3115 "%<%s taskgroup%> construct not closely " 3116 "nested inside of %<taskgroup%> region", 3117 construct); 3118 return false; 3119 case GIMPLE_OMP_TASK: 3120 if (gimple_omp_task_taskloop_p (octx->stmt) 3121 && octx->outer 3122 && is_taskloop_ctx (octx->outer)) 3123 { 3124 tree clauses 3125 = gimple_omp_for_clauses (octx->outer->stmt); 3126 if (!omp_find_clause (clauses, OMP_CLAUSE_NOGROUP)) 3127 break; 3128 } 3129 continue; 3130 default: 3131 continue; 3132 } 3133 break; 3134 } 3135 ctx->cancellable = true; 3136 } 3137 kind = "taskgroup"; 3138 break; 3139 default: 3140 error_at (gimple_location (stmt), "invalid arguments"); 3141 return false; 3142 } 3143 if (bad) 3144 { 3145 error_at (gimple_location (stmt), 3146 "%<%s %s%> construct not closely nested inside of %qs", 3147 construct, kind, bad); 3148 return 
false; 3149 } 3150 } 3151 /* FALLTHRU */ 3152 case GIMPLE_OMP_SECTIONS: 3153 case GIMPLE_OMP_SINGLE: 3154 for (; ctx != NULL; ctx = ctx->outer) 3155 switch (gimple_code (ctx->stmt)) 3156 { 3157 case GIMPLE_OMP_FOR: 3158 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR 3159 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP) 3160 break; 3161 /* FALLTHRU */ 3162 case GIMPLE_OMP_SECTIONS: 3163 case GIMPLE_OMP_SINGLE: 3164 case GIMPLE_OMP_ORDERED: 3165 case GIMPLE_OMP_MASTER: 3166 case GIMPLE_OMP_TASK: 3167 case GIMPLE_OMP_CRITICAL: 3168 if (is_gimple_call (stmt)) 3169 { 3170 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt)) 3171 != BUILT_IN_GOMP_BARRIER) 3172 return true; 3173 error_at (gimple_location (stmt), 3174 "barrier region may not be closely nested inside " 3175 "of work-sharing, %<loop%>, %<critical%>, " 3176 "%<ordered%>, %<master%>, explicit %<task%> or " 3177 "%<taskloop%> region"); 3178 return false; 3179 } 3180 error_at (gimple_location (stmt), 3181 "work-sharing region may not be closely nested inside " 3182 "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, " 3183 "%<master%>, explicit %<task%> or %<taskloop%> region"); 3184 return false; 3185 case GIMPLE_OMP_PARALLEL: 3186 case GIMPLE_OMP_TEAMS: 3187 return true; 3188 case GIMPLE_OMP_TARGET: 3189 if (gimple_omp_target_kind (ctx->stmt) 3190 == GF_OMP_TARGET_KIND_REGION) 3191 return true; 3192 break; 3193 default: 3194 break; 3195 } 3196 break; 3197 case GIMPLE_OMP_MASTER: 3198 for (; ctx != NULL; ctx = ctx->outer) 3199 switch (gimple_code (ctx->stmt)) 3200 { 3201 case GIMPLE_OMP_FOR: 3202 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR 3203 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP) 3204 break; 3205 /* FALLTHRU */ 3206 case GIMPLE_OMP_SECTIONS: 3207 case GIMPLE_OMP_SINGLE: 3208 case GIMPLE_OMP_TASK: 3209 error_at (gimple_location (stmt), 3210 "%<master%> region may not be closely nested inside " 3211 "of work-sharing, %<loop%>, explicit %<task%> or " 3212 "%<taskloop%> region"); 3213 return false; 3214 case GIMPLE_OMP_PARALLEL: 3215 case GIMPLE_OMP_TEAMS: 3216 return true; 3217 case GIMPLE_OMP_TARGET: 3218 if (gimple_omp_target_kind (ctx->stmt) 3219 == GF_OMP_TARGET_KIND_REGION) 3220 return true; 3221 break; 3222 default: 3223 break; 3224 } 3225 break; 3226 case GIMPLE_OMP_TASK: 3227 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c)) 3228 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND 3229 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE 3230 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)) 3231 { 3232 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c); 3233 error_at (OMP_CLAUSE_LOCATION (c), 3234 "%<depend(%s)%> is only allowed in %<omp ordered%>", 3235 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink"); 3236 return false; 3237 } 3238 break; 3239 case GIMPLE_OMP_ORDERED: 3240 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)); 3241 c; c = OMP_CLAUSE_CHAIN (c)) 3242 { 3243 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND) 3244 { 3245 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS 3246 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD); 3247 continue; 3248 } 3249 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c); 3250 if (kind == OMP_CLAUSE_DEPEND_SOURCE 3251 || kind == OMP_CLAUSE_DEPEND_SINK) 3252 { 3253 tree oclause; 3254 /* Look for containing ordered(N) loop. 
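	       A conforming nest looks like (illustrative):

		 #pragma omp for ordered(2)
		 for (i = 0; i < n; i++)
		   for (j = 0; j < m; j++)
		     {
		       #pragma omp ordered depend(sink: i - 1, j)
		       ...
		       #pragma omp ordered depend(source)
		     }

	       with no enclosing ordered loop, or with an ordered
	       clause lacking a parameter, the errors below fire.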
*/ 3255 if (ctx == NULL 3256 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR 3257 || (oclause 3258 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt), 3259 OMP_CLAUSE_ORDERED)) == NULL_TREE) 3260 { 3261 error_at (OMP_CLAUSE_LOCATION (c), 3262 "%<ordered%> construct with %<depend%> clause " 3263 "must be closely nested inside an %<ordered%> " 3264 "loop"); 3265 return false; 3266 } 3267 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE) 3268 { 3269 error_at (OMP_CLAUSE_LOCATION (c), 3270 "%<ordered%> construct with %<depend%> clause " 3271 "must be closely nested inside a loop with " 3272 "%<ordered%> clause with a parameter"); 3273 return false; 3274 } 3275 } 3276 else 3277 { 3278 error_at (OMP_CLAUSE_LOCATION (c), 3279 "invalid depend kind in omp %<ordered%> %<depend%>"); 3280 return false; 3281 } 3282 } 3283 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt)); 3284 if (omp_find_clause (c, OMP_CLAUSE_SIMD)) 3285 { 3286 /* ordered simd must be closely nested inside of simd region, 3287 and simd region must not encounter constructs other than 3288 ordered simd, therefore ordered simd may be either orphaned, 3289 or ctx->stmt must be simd. The latter case is handled already 3290 earlier. */ 3291 if (ctx != NULL) 3292 { 3293 error_at (gimple_location (stmt), 3294 "%<ordered%> %<simd%> must be closely nested inside " 3295 "%<simd%> region"); 3296 return false; 3297 } 3298 } 3299 for (; ctx != NULL; ctx = ctx->outer) 3300 switch (gimple_code (ctx->stmt)) 3301 { 3302 case GIMPLE_OMP_CRITICAL: 3303 case GIMPLE_OMP_TASK: 3304 case GIMPLE_OMP_ORDERED: 3305 ordered_in_taskloop: 3306 error_at (gimple_location (stmt), 3307 "%<ordered%> region may not be closely nested inside " 3308 "of %<critical%>, %<ordered%>, explicit %<task%> or " 3309 "%<taskloop%> region"); 3310 return false; 3311 case GIMPLE_OMP_FOR: 3312 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP) 3313 goto ordered_in_taskloop; 3314 tree o; 3315 o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt), 3316 OMP_CLAUSE_ORDERED); 3317 if (o == NULL) 3318 { 3319 error_at (gimple_location (stmt), 3320 "%<ordered%> region must be closely nested inside " 3321 "a loop region with an %<ordered%> clause"); 3322 return false; 3323 } 3324 if (OMP_CLAUSE_ORDERED_EXPR (o) != NULL_TREE 3325 && omp_find_clause (c, OMP_CLAUSE_DEPEND) == NULL_TREE) 3326 { 3327 error_at (gimple_location (stmt), 3328 "%<ordered%> region without %<depend%> clause may " 3329 "not be closely nested inside a loop region with " 3330 "an %<ordered%> clause with a parameter"); 3331 return false; 3332 } 3333 return true; 3334 case GIMPLE_OMP_TARGET: 3335 if (gimple_omp_target_kind (ctx->stmt) 3336 != GF_OMP_TARGET_KIND_REGION) 3337 break; 3338 /* FALLTHRU */ 3339 case GIMPLE_OMP_PARALLEL: 3340 case GIMPLE_OMP_TEAMS: 3341 error_at (gimple_location (stmt), 3342 "%<ordered%> region must be closely nested inside " 3343 "a loop region with an %<ordered%> clause"); 3344 return false; 3345 default: 3346 break; 3347 } 3348 break; 3349 case GIMPLE_OMP_CRITICAL: 3350 { 3351 tree this_stmt_name 3352 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt)); 3353 for (; ctx != NULL; ctx = ctx->outer) 3354 if (gomp_critical *other_crit 3355 = dyn_cast <gomp_critical *> (ctx->stmt)) 3356 if (this_stmt_name == gimple_omp_critical_name (other_crit)) 3357 { 3358 error_at (gimple_location (stmt), 3359 "%<critical%> region may not be nested inside " 3360 "a %<critical%> region with the same name"); 3361 return false; 3362 } 3363 } 3364 break; 3365 case 
GIMPLE_OMP_TEAMS:
      if (ctx == NULL)
	break;
      else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
	       || (gimple_omp_target_kind (ctx->stmt)
		   != GF_OMP_TARGET_KIND_REGION))
	{
	  /* A teams construct can appear either strictly nested inside of
	     a target construct with no intervening stmts, or it can be
	     encountered only by the initial task (so it must not appear
	     inside any OpenMP construct).  */
	  error_at (gimple_location (stmt),
		    "%<teams%> construct must be closely nested inside of "
		    "%<target%> construct or not nested in any OpenMP "
		    "construct");
	  return false;
	}
      break;
    case GIMPLE_OMP_TARGET:
      for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
	  {
	    enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%<depend(%s)%> is only allowed in %<omp ordered%>",
		      kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
	    return false;
	  }
      if (is_gimple_omp_offloaded (stmt)
	  && oacc_get_fn_attrib (cfun->decl) != NULL)
	{
	  error_at (gimple_location (stmt),
		    "OpenACC region inside of OpenACC routine, nested "
		    "parallelism not supported yet");
	  return false;
	}
      for (; ctx != NULL; ctx = ctx->outer)
	{
	  if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
	    {
	      if (is_gimple_omp (stmt)
		  && is_gimple_omp_oacc (stmt)
		  && is_gimple_omp (ctx->stmt))
		{
		  error_at (gimple_location (stmt),
			    "OpenACC construct inside of non-OpenACC region");
		  return false;
		}
	      continue;
	    }

	  const char *stmt_name, *ctx_stmt_name;
	  switch (gimple_omp_target_kind (stmt))
	    {
	    case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
	    case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
	    case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
	    case GF_OMP_TARGET_KIND_ENTER_DATA:
	      stmt_name = "target enter data"; break;
	    case GF_OMP_TARGET_KIND_EXIT_DATA:
	      stmt_name = "target exit data"; break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
	    case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
	    case GF_OMP_TARGET_KIND_OACC_SERIAL: stmt_name = "serial"; break;
	    case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
	    case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
	    case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
	      stmt_name = "enter/exit data"; break;
	    case GF_OMP_TARGET_KIND_OACC_DECLARE: stmt_name = "declare"; break;
	    case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
	      break;
	    default: gcc_unreachable ();
	    }
	  switch (gimple_omp_target_kind (ctx->stmt))
	    {
	    case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
	    case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL:
	      ctx_stmt_name = "parallel"; break;
	    case GF_OMP_TARGET_KIND_OACC_KERNELS:
	      ctx_stmt_name = "kernels"; break;
	    case GF_OMP_TARGET_KIND_OACC_SERIAL:
	      ctx_stmt_name = "serial"; break;
	    case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
	    case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
	      ctx_stmt_name = "host_data"; break;
	    default: gcc_unreachable ();
3454 } 3455 3456 /* OpenACC/OpenMP mismatch? */ 3457 if (is_gimple_omp_oacc (stmt) 3458 != is_gimple_omp_oacc (ctx->stmt)) 3459 { 3460 error_at (gimple_location (stmt), 3461 "%s %qs construct inside of %s %qs region", 3462 (is_gimple_omp_oacc (stmt) 3463 ? "OpenACC" : "OpenMP"), stmt_name, 3464 (is_gimple_omp_oacc (ctx->stmt) 3465 ? "OpenACC" : "OpenMP"), ctx_stmt_name); 3466 return false; 3467 } 3468 if (is_gimple_omp_offloaded (ctx->stmt)) 3469 { 3470 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */ 3471 if (is_gimple_omp_oacc (ctx->stmt)) 3472 { 3473 error_at (gimple_location (stmt), 3474 "%qs construct inside of %qs region", 3475 stmt_name, ctx_stmt_name); 3476 return false; 3477 } 3478 else 3479 { 3480 warning_at (gimple_location (stmt), 0, 3481 "%qs construct inside of %qs region", 3482 stmt_name, ctx_stmt_name); 3483 } 3484 } 3485 } 3486 break; 3487 default: 3488 break; 3489 } 3490 return true; 3491 } 3492 3493 3494 /* Helper function scan_omp. 3495 3496 Callback for walk_tree or operators in walk_gimple_stmt used to 3497 scan for OMP directives in TP. */ 3498 3499 static tree 3500 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data) 3501 { 3502 struct walk_stmt_info *wi = (struct walk_stmt_info *) data; 3503 omp_context *ctx = (omp_context *) wi->info; 3504 tree t = *tp; 3505 3506 switch (TREE_CODE (t)) 3507 { 3508 case VAR_DECL: 3509 case PARM_DECL: 3510 case LABEL_DECL: 3511 case RESULT_DECL: 3512 if (ctx) 3513 { 3514 tree repl = remap_decl (t, &ctx->cb); 3515 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK); 3516 *tp = repl; 3517 } 3518 break; 3519 3520 default: 3521 if (ctx && TYPE_P (t)) 3522 *tp = remap_type (t, &ctx->cb); 3523 else if (!DECL_P (t)) 3524 { 3525 *walk_subtrees = 1; 3526 if (ctx) 3527 { 3528 tree tem = remap_type (TREE_TYPE (t), &ctx->cb); 3529 if (tem != TREE_TYPE (t)) 3530 { 3531 if (TREE_CODE (t) == INTEGER_CST) 3532 *tp = wide_int_to_tree (tem, wi::to_wide (t)); 3533 else 3534 TREE_TYPE (t) = tem; 3535 } 3536 } 3537 } 3538 break; 3539 } 3540 3541 return NULL_TREE; 3542 } 3543 3544 /* Return true if FNDECL is a setjmp or a longjmp. */ 3545 3546 static bool 3547 setjmp_or_longjmp_p (const_tree fndecl) 3548 { 3549 if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP) 3550 || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP)) 3551 return true; 3552 3553 tree declname = DECL_NAME (fndecl); 3554 if (!declname 3555 || (DECL_CONTEXT (fndecl) != NULL_TREE 3556 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL) 3557 || !TREE_PUBLIC (fndecl)) 3558 return false; 3559 3560 const char *name = IDENTIFIER_POINTER (declname); 3561 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp"); 3562 } 3563 3564 /* Return true if FNDECL is an omp_* runtime API call. */ 3565 3566 static bool 3567 omp_runtime_api_call (const_tree fndecl) 3568 { 3569 tree declname = DECL_NAME (fndecl); 3570 if (!declname 3571 || (DECL_CONTEXT (fndecl) != NULL_TREE 3572 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL) 3573 || !TREE_PUBLIC (fndecl)) 3574 return false; 3575 3576 const char *name = IDENTIFIER_POINTER (declname); 3577 if (strncmp (name, "omp_", 4) != 0) 3578 return false; 3579 3580 static const char *omp_runtime_apis[] = 3581 { 3582 /* This array has 3 sections. First omp_* calls that don't 3583 have any suffixes. 
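       E.g. (illustrative of the suffix matching below):
       "omp_target_alloc" matches only exactly; "omp_get_num_threads"
       additionally matches "omp_get_num_threads_"; and
       "omp_get_team_size" further matches "omp_get_team_size_8_",
       covering the Fortran entry points.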
*/ 3584 "target_alloc", 3585 "target_associate_ptr", 3586 "target_disassociate_ptr", 3587 "target_free", 3588 "target_is_present", 3589 "target_memcpy", 3590 "target_memcpy_rect", 3591 NULL, 3592 /* Now omp_* calls that are available as omp_* and omp_*_. */ 3593 "capture_affinity", 3594 "destroy_lock", 3595 "destroy_nest_lock", 3596 "display_affinity", 3597 "get_active_level", 3598 "get_affinity_format", 3599 "get_cancellation", 3600 "get_default_device", 3601 "get_dynamic", 3602 "get_initial_device", 3603 "get_level", 3604 "get_max_active_levels", 3605 "get_max_task_priority", 3606 "get_max_threads", 3607 "get_nested", 3608 "get_num_devices", 3609 "get_num_places", 3610 "get_num_procs", 3611 "get_num_teams", 3612 "get_num_threads", 3613 "get_partition_num_places", 3614 "get_place_num", 3615 "get_proc_bind", 3616 "get_team_num", 3617 "get_thread_limit", 3618 "get_thread_num", 3619 "get_wtick", 3620 "get_wtime", 3621 "in_final", 3622 "in_parallel", 3623 "init_lock", 3624 "init_nest_lock", 3625 "is_initial_device", 3626 "pause_resource", 3627 "pause_resource_all", 3628 "set_affinity_format", 3629 "set_lock", 3630 "set_nest_lock", 3631 "test_lock", 3632 "test_nest_lock", 3633 "unset_lock", 3634 "unset_nest_lock", 3635 NULL, 3636 /* And finally calls available as omp_*, omp_*_ and omp_*_8_. */ 3637 "get_ancestor_thread_num", 3638 "get_partition_place_nums", 3639 "get_place_num_procs", 3640 "get_place_proc_ids", 3641 "get_schedule", 3642 "get_team_size", 3643 "set_default_device", 3644 "set_dynamic", 3645 "set_max_active_levels", 3646 "set_nested", 3647 "set_num_threads", 3648 "set_schedule" 3649 }; 3650 3651 int mode = 0; 3652 for (unsigned i = 0; i < ARRAY_SIZE (omp_runtime_apis); i++) 3653 { 3654 if (omp_runtime_apis[i] == NULL) 3655 { 3656 mode++; 3657 continue; 3658 } 3659 size_t len = strlen (omp_runtime_apis[i]); 3660 if (strncmp (name + 4, omp_runtime_apis[i], len) == 0 3661 && (name[4 + len] == '\0' 3662 || (mode > 0 3663 && name[4 + len] == '_' 3664 && (name[4 + len + 1] == '\0' 3665 || (mode > 1 3666 && strcmp (name + 4 + len + 1, "8_") == 0))))) 3667 return true; 3668 } 3669 return false; 3670 } 3671 3672 /* Helper function for scan_omp. 3673 3674 Callback for walk_gimple_stmt used to scan for OMP directives in 3675 the current statement in GSI. */ 3676 3677 static tree 3678 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p, 3679 struct walk_stmt_info *wi) 3680 { 3681 gimple *stmt = gsi_stmt (*gsi); 3682 omp_context *ctx = (omp_context *) wi->info; 3683 3684 if (gimple_has_location (stmt)) 3685 input_location = gimple_location (stmt); 3686 3687 /* Check the nesting restrictions. 
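     E.g. (illustrative): a "#pragma omp for" directly inside a simd
     construct, or a call to setjmp within a simd body, is diagnosed
     here; the offending statement is then replaced by a nop below.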
*/ 3688 bool remove = false; 3689 if (is_gimple_omp (stmt)) 3690 remove = !check_omp_nesting_restrictions (stmt, ctx); 3691 else if (is_gimple_call (stmt)) 3692 { 3693 tree fndecl = gimple_call_fndecl (stmt); 3694 if (fndecl) 3695 { 3696 if (ctx 3697 && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR 3698 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD 3699 && setjmp_or_longjmp_p (fndecl) 3700 && !ctx->loop_p) 3701 { 3702 remove = true; 3703 error_at (gimple_location (stmt), 3704 "setjmp/longjmp inside %<simd%> construct"); 3705 } 3706 else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL) 3707 switch (DECL_FUNCTION_CODE (fndecl)) 3708 { 3709 case BUILT_IN_GOMP_BARRIER: 3710 case BUILT_IN_GOMP_CANCEL: 3711 case BUILT_IN_GOMP_CANCELLATION_POINT: 3712 case BUILT_IN_GOMP_TASKYIELD: 3713 case BUILT_IN_GOMP_TASKWAIT: 3714 case BUILT_IN_GOMP_TASKGROUP_START: 3715 case BUILT_IN_GOMP_TASKGROUP_END: 3716 remove = !check_omp_nesting_restrictions (stmt, ctx); 3717 break; 3718 default: 3719 break; 3720 } 3721 else if (ctx) 3722 { 3723 omp_context *octx = ctx; 3724 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN && ctx->outer) 3725 octx = ctx->outer; 3726 if (octx->order_concurrent && omp_runtime_api_call (fndecl)) 3727 { 3728 remove = true; 3729 error_at (gimple_location (stmt), 3730 "OpenMP runtime API call %qD in a region with " 3731 "%<order(concurrent)%> clause", fndecl); 3732 } 3733 } 3734 } 3735 } 3736 if (remove) 3737 { 3738 stmt = gimple_build_nop (); 3739 gsi_replace (gsi, stmt, false); 3740 } 3741 3742 *handled_ops_p = true; 3743 3744 switch (gimple_code (stmt)) 3745 { 3746 case GIMPLE_OMP_PARALLEL: 3747 taskreg_nesting_level++; 3748 scan_omp_parallel (gsi, ctx); 3749 taskreg_nesting_level--; 3750 break; 3751 3752 case GIMPLE_OMP_TASK: 3753 taskreg_nesting_level++; 3754 scan_omp_task (gsi, ctx); 3755 taskreg_nesting_level--; 3756 break; 3757 3758 case GIMPLE_OMP_FOR: 3759 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt)) 3760 == GF_OMP_FOR_KIND_SIMD) 3761 && gimple_omp_for_combined_into_p (stmt) 3762 && gimple_code (ctx->stmt) != GIMPLE_OMP_SCAN) 3763 { 3764 tree clauses = gimple_omp_for_clauses (as_a <gomp_for *> (stmt)); 3765 tree c = omp_find_clause (clauses, OMP_CLAUSE_REDUCTION); 3766 if (c && OMP_CLAUSE_REDUCTION_INSCAN (c) && !seen_error ()) 3767 { 3768 scan_omp_simd_scan (gsi, as_a <gomp_for *> (stmt), ctx); 3769 break; 3770 } 3771 } 3772 if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt)) 3773 == GF_OMP_FOR_KIND_SIMD) 3774 && omp_maybe_offloaded_ctx (ctx) 3775 && omp_max_simt_vf ()) 3776 scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx); 3777 else 3778 scan_omp_for (as_a <gomp_for *> (stmt), ctx); 3779 break; 3780 3781 case GIMPLE_OMP_SECTIONS: 3782 scan_omp_sections (as_a <gomp_sections *> (stmt), ctx); 3783 break; 3784 3785 case GIMPLE_OMP_SINGLE: 3786 scan_omp_single (as_a <gomp_single *> (stmt), ctx); 3787 break; 3788 3789 case GIMPLE_OMP_SCAN: 3790 if (tree clauses = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt))) 3791 { 3792 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_INCLUSIVE) 3793 ctx->scan_inclusive = true; 3794 else if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_EXCLUSIVE) 3795 ctx->scan_exclusive = true; 3796 } 3797 /* FALLTHRU */ 3798 case GIMPLE_OMP_SECTION: 3799 case GIMPLE_OMP_MASTER: 3800 case GIMPLE_OMP_ORDERED: 3801 case GIMPLE_OMP_CRITICAL: 3802 case GIMPLE_OMP_GRID_BODY: 3803 ctx = new_omp_context (stmt, ctx); 3804 scan_omp (gimple_omp_body_ptr (stmt), ctx); 3805 break; 3806 3807 case GIMPLE_OMP_TASKGROUP: 3808 ctx = new_omp_context (stmt, ctx); 3809 
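/* Unlike the constructs in the group above, a taskgroup can
carry clauses (task_reduction), so scan those before walking
the body. */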
scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx); 3810 scan_omp (gimple_omp_body_ptr (stmt), ctx); 3811 break; 3812 3813 case GIMPLE_OMP_TARGET: 3814 if (is_gimple_omp_offloaded (stmt)) 3815 { 3816 taskreg_nesting_level++; 3817 scan_omp_target (as_a <gomp_target *> (stmt), ctx); 3818 taskreg_nesting_level--; 3819 } 3820 else 3821 scan_omp_target (as_a <gomp_target *> (stmt), ctx); 3822 break; 3823 3824 case GIMPLE_OMP_TEAMS: 3825 if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt))) 3826 { 3827 taskreg_nesting_level++; 3828 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx); 3829 taskreg_nesting_level--; 3830 } 3831 else 3832 scan_omp_teams (as_a <gomp_teams *> (stmt), ctx); 3833 break; 3834 3835 case GIMPLE_BIND: 3836 { 3837 tree var; 3838 3839 *handled_ops_p = false; 3840 if (ctx) 3841 for (var = gimple_bind_vars (as_a <gbind *> (stmt)); 3842 var ; 3843 var = DECL_CHAIN (var)) 3844 insert_decl_map (&ctx->cb, var, var); 3845 } 3846 break; 3847 default: 3848 *handled_ops_p = false; 3849 break; 3850 } 3851 3852 return NULL_TREE; 3853 } 3854 3855 3856 /* Scan all the statements starting at the current statement. CTX 3857 contains context information about the OMP directives and 3858 clauses found during the scan. */ 3859 3860 static void 3861 scan_omp (gimple_seq *body_p, omp_context *ctx) 3862 { 3863 location_t saved_location; 3864 struct walk_stmt_info wi; 3865 3866 memset (&wi, 0, sizeof (wi)); 3867 wi.info = ctx; 3868 wi.want_locations = true; 3869 3870 saved_location = input_location; 3871 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi); 3872 input_location = saved_location; 3873 } 3874 3875 /* Re-gimplification and code generation routines. */ 3876 3877 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars 3878 of BIND if in a method. */ 3879 3880 static void 3881 maybe_remove_omp_member_access_dummy_vars (gbind *bind) 3882 { 3883 if (DECL_ARGUMENTS (current_function_decl) 3884 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl)) 3885 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl))) 3886 == POINTER_TYPE)) 3887 { 3888 tree vars = gimple_bind_vars (bind); 3889 for (tree *pvar = &vars; *pvar; ) 3890 if (omp_member_access_dummy_var (*pvar)) 3891 *pvar = DECL_CHAIN (*pvar); 3892 else 3893 pvar = &DECL_CHAIN (*pvar); 3894 gimple_bind_set_vars (bind, vars); 3895 } 3896 } 3897 3898 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of 3899 block and its subblocks. */ 3900 3901 static void 3902 remove_member_access_dummy_vars (tree block) 3903 { 3904 for (tree *pvar = &BLOCK_VARS (block); *pvar; ) 3905 if (omp_member_access_dummy_var (*pvar)) 3906 *pvar = DECL_CHAIN (*pvar); 3907 else 3908 pvar = &DECL_CHAIN (*pvar); 3909 3910 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block)) 3911 remove_member_access_dummy_vars (block); 3912 } 3913 3914 /* If a context was created for STMT when it was scanned, return it. */ 3915 3916 static omp_context * 3917 maybe_lookup_ctx (gimple *stmt) 3918 { 3919 splay_tree_node n; 3920 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt); 3921 return n ? (omp_context *) n->value : NULL; 3922 } 3923 3924 3925 /* Find the mapping for DECL in CTX or the immediately enclosing 3926 context that has a mapping for DECL. 3927 3928 If CTX is a nested parallel directive, we may have to use the decl 3929 mappings created in CTX's parent context. 
Suppose that we have the
3930 following parallel nesting (variable UIDs shown for clarity):
3931
3932 iD.1562 = 0;
3933 #omp parallel shared(iD.1562) -> outer parallel
3934 iD.1562 = iD.1562 + 1;
3935
3936 #omp parallel shared (iD.1562) -> inner parallel
3937 iD.1562 = iD.1562 - 1;
3938
3939 Each parallel structure will create a distinct .omp_data_s structure
3940 for copying iD.1562 in/out of the directive:
3941
3942 outer parallel .omp_data_s.1.i -> iD.1562
3943 inner parallel .omp_data_s.2.i -> iD.1562
3944
3945 A shared variable mapping will produce a copy-out operation before
3946 the parallel directive and a copy-in operation after it. So, in
3947 this case we would have:
3948
3949 iD.1562 = 0;
3950 .omp_data_o.1.i = iD.1562;
3951 #omp parallel shared(iD.1562) -> outer parallel
3952 .omp_data_i.1 = &.omp_data_o.1
3953 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3954
3955 .omp_data_o.2.i = iD.1562; -> **
3956 #omp parallel shared(iD.1562) -> inner parallel
3957 .omp_data_i.2 = &.omp_data_o.2
3958 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3959
3960
3961 ** This is a problem. The symbol iD.1562 cannot be referenced
3962 inside the body of the outer parallel region. But since we are
3963 emitting this copy operation while expanding the inner parallel
3964 directive, we need to access the CTX structure of the outer
3965 parallel directive to get the correct mapping:
3966
3967 .omp_data_o.2.i = .omp_data_i.1->i
3968
3969 Since there may be other workshare or parallel directives enclosing
3970 the parallel directive, it may be necessary to walk up the context
3971 parent chain. This is not a problem in general because nested
3972 parallelism happens only rarely. */
3973
3974 static tree
3975 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3976 {
3977 tree t;
3978 omp_context *up;
3979
3980 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3981 t = maybe_lookup_decl (decl, up);
3982
3983 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
3984
3985 return t ? t : decl;
3986 }
3987
3988
3989 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3990 in outer contexts. */
3991
3992 static tree
3993 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3994 {
3995 tree t = NULL;
3996 omp_context *up;
3997
3998 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3999 t = maybe_lookup_decl (decl, up);
4000
4001 return t ? t : decl;
4002 }
4003
4004
4005 /* Construct the initialization value for reduction operation OP.
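The value is the identity element of the reduction: zero for
addition and the OR-style operations, one for multiplication and
the AND-style operations, all-ones for bitwise AND, and the
smallest (for MAX) or largest (for MIN) value representable in
TYPE, i.e. -inf/+inf for floating point types that honor
infinities.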
*/ 4006 4007 tree 4008 omp_reduction_init_op (location_t loc, enum tree_code op, tree type) 4009 { 4010 switch (op) 4011 { 4012 case PLUS_EXPR: 4013 case MINUS_EXPR: 4014 case BIT_IOR_EXPR: 4015 case BIT_XOR_EXPR: 4016 case TRUTH_OR_EXPR: 4017 case TRUTH_ORIF_EXPR: 4018 case TRUTH_XOR_EXPR: 4019 case NE_EXPR: 4020 return build_zero_cst (type); 4021 4022 case MULT_EXPR: 4023 case TRUTH_AND_EXPR: 4024 case TRUTH_ANDIF_EXPR: 4025 case EQ_EXPR: 4026 return fold_convert_loc (loc, type, integer_one_node); 4027 4028 case BIT_AND_EXPR: 4029 return fold_convert_loc (loc, type, integer_minus_one_node); 4030 4031 case MAX_EXPR: 4032 if (SCALAR_FLOAT_TYPE_P (type)) 4033 { 4034 REAL_VALUE_TYPE max, min; 4035 if (HONOR_INFINITIES (type)) 4036 { 4037 real_inf (&max); 4038 real_arithmetic (&min, NEGATE_EXPR, &max, NULL); 4039 } 4040 else 4041 real_maxval (&min, 1, TYPE_MODE (type)); 4042 return build_real (type, min); 4043 } 4044 else if (POINTER_TYPE_P (type)) 4045 { 4046 wide_int min 4047 = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type)); 4048 return wide_int_to_tree (type, min); 4049 } 4050 else 4051 { 4052 gcc_assert (INTEGRAL_TYPE_P (type)); 4053 return TYPE_MIN_VALUE (type); 4054 } 4055 4056 case MIN_EXPR: 4057 if (SCALAR_FLOAT_TYPE_P (type)) 4058 { 4059 REAL_VALUE_TYPE max; 4060 if (HONOR_INFINITIES (type)) 4061 real_inf (&max); 4062 else 4063 real_maxval (&max, 0, TYPE_MODE (type)); 4064 return build_real (type, max); 4065 } 4066 else if (POINTER_TYPE_P (type)) 4067 { 4068 wide_int max 4069 = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type)); 4070 return wide_int_to_tree (type, max); 4071 } 4072 else 4073 { 4074 gcc_assert (INTEGRAL_TYPE_P (type)); 4075 return TYPE_MAX_VALUE (type); 4076 } 4077 4078 default: 4079 gcc_unreachable (); 4080 } 4081 } 4082 4083 /* Construct the initialization value for reduction CLAUSE. */ 4084 4085 tree 4086 omp_reduction_init (tree clause, tree type) 4087 { 4088 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause), 4089 OMP_CLAUSE_REDUCTION_CODE (clause), type); 4090 } 4091 4092 /* Return alignment to be assumed for var in CLAUSE, which should be 4093 OMP_CLAUSE_ALIGNED. */ 4094 4095 static tree 4096 omp_clause_aligned_alignment (tree clause) 4097 { 4098 if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause)) 4099 return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause); 4100 4101 /* Otherwise return implementation defined alignment. */ 4102 unsigned int al = 1; 4103 opt_scalar_mode mode_iter; 4104 auto_vector_modes modes; 4105 targetm.vectorize.autovectorize_vector_modes (&modes, true); 4106 static enum mode_class classes[] 4107 = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT }; 4108 for (int i = 0; i < 4; i += 2) 4109 /* The for loop above dictates that we only walk through scalar classes. 
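(I is 0 or 2, so CLASSES[I] is MODE_INT or MODE_FLOAT, while
CLASSES[I + 1] is the corresponding vector class that the
preferred SIMD mode is checked against below.)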
*/ 4110 FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i]) 4111 { 4112 scalar_mode mode = mode_iter.require (); 4113 machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode); 4114 if (GET_MODE_CLASS (vmode) != classes[i + 1]) 4115 continue; 4116 machine_mode alt_vmode; 4117 for (unsigned int j = 0; j < modes.length (); ++j) 4118 if (related_vector_mode (modes[j], mode).exists (&alt_vmode) 4119 && known_ge (GET_MODE_SIZE (alt_vmode), GET_MODE_SIZE (vmode))) 4120 vmode = alt_vmode; 4121 4122 tree type = lang_hooks.types.type_for_mode (mode, 1); 4123 if (type == NULL_TREE || TYPE_MODE (type) != mode) 4124 continue; 4125 type = build_vector_type_for_mode (type, vmode); 4126 if (TYPE_MODE (type) != vmode) 4127 continue; 4128 if (TYPE_ALIGN_UNIT (type) > al) 4129 al = TYPE_ALIGN_UNIT (type); 4130 } 4131 return build_int_cst (integer_type_node, al); 4132 } 4133 4134 4135 /* This structure is part of the interface between lower_rec_simd_input_clauses 4136 and lower_rec_input_clauses. */ 4137 4138 class omplow_simd_context { 4139 public: 4140 omplow_simd_context () { memset (this, 0, sizeof (*this)); } 4141 tree idx; 4142 tree lane; 4143 tree lastlane; 4144 vec<tree, va_heap> simt_eargs; 4145 gimple_seq simt_dlist; 4146 poly_uint64_pod max_vf; 4147 bool is_simt; 4148 }; 4149 4150 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd 4151 privatization. */ 4152 4153 static bool 4154 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx, 4155 omplow_simd_context *sctx, tree &ivar, 4156 tree &lvar, tree *rvar = NULL, 4157 tree *rvar2 = NULL) 4158 { 4159 if (known_eq (sctx->max_vf, 0U)) 4160 { 4161 sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf (); 4162 if (maybe_gt (sctx->max_vf, 1U)) 4163 { 4164 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt), 4165 OMP_CLAUSE_SAFELEN); 4166 if (c) 4167 { 4168 poly_uint64 safe_len; 4169 if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len) 4170 || maybe_lt (safe_len, 1U)) 4171 sctx->max_vf = 1; 4172 else 4173 sctx->max_vf = lower_bound (sctx->max_vf, safe_len); 4174 } 4175 } 4176 if (maybe_gt (sctx->max_vf, 1U)) 4177 { 4178 sctx->idx = create_tmp_var (unsigned_type_node); 4179 sctx->lane = create_tmp_var (unsigned_type_node); 4180 } 4181 } 4182 if (known_eq (sctx->max_vf, 1U)) 4183 return false; 4184 4185 if (sctx->is_simt) 4186 { 4187 if (is_gimple_reg (new_var)) 4188 { 4189 ivar = lvar = new_var; 4190 return true; 4191 } 4192 tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type); 4193 ivar = lvar = create_tmp_var (type); 4194 TREE_ADDRESSABLE (ivar) = 1; 4195 DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"), 4196 NULL, DECL_ATTRIBUTES (ivar)); 4197 sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar)); 4198 tree clobber = build_clobber (type); 4199 gimple *g = gimple_build_assign (ivar, clobber); 4200 gimple_seq_add_stmt (&sctx->simt_dlist, g); 4201 } 4202 else 4203 { 4204 tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf); 4205 tree avar = create_tmp_var_raw (atype); 4206 if (TREE_ADDRESSABLE (new_var)) 4207 TREE_ADDRESSABLE (avar) = 1; 4208 DECL_ATTRIBUTES (avar) 4209 = tree_cons (get_identifier ("omp simd array"), NULL, 4210 DECL_ATTRIBUTES (avar)); 4211 gimple_add_tmp_var (avar); 4212 tree iavar = avar; 4213 if (rvar && !ctx->for_simd_scan_phase) 4214 { 4215 /* For inscan reductions, create another array temporary, 4216 which will hold the reduced value. 
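AVAR keeps the per-lane private values; the AVAR -> IAVAR
entry recorded in the decl map below associates the two
arrays for the later lowering of the scan.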
*/ 4217 iavar = create_tmp_var_raw (atype); 4218 if (TREE_ADDRESSABLE (new_var)) 4219 TREE_ADDRESSABLE (iavar) = 1; 4220 DECL_ATTRIBUTES (iavar) 4221 = tree_cons (get_identifier ("omp simd array"), NULL, 4222 tree_cons (get_identifier ("omp simd inscan"), NULL, 4223 DECL_ATTRIBUTES (iavar))); 4224 gimple_add_tmp_var (iavar); 4225 ctx->cb.decl_map->put (avar, iavar); 4226 if (sctx->lastlane == NULL_TREE) 4227 sctx->lastlane = create_tmp_var (unsigned_type_node); 4228 *rvar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar, 4229 sctx->lastlane, NULL_TREE, NULL_TREE); 4230 TREE_THIS_NOTRAP (*rvar) = 1; 4231 4232 if (ctx->scan_exclusive) 4233 { 4234 /* And for exclusive scan yet another one, which will 4235 hold the value during the scan phase. */ 4236 tree savar = create_tmp_var_raw (atype); 4237 if (TREE_ADDRESSABLE (new_var)) 4238 TREE_ADDRESSABLE (savar) = 1; 4239 DECL_ATTRIBUTES (savar) 4240 = tree_cons (get_identifier ("omp simd array"), NULL, 4241 tree_cons (get_identifier ("omp simd inscan " 4242 "exclusive"), NULL, 4243 DECL_ATTRIBUTES (savar))); 4244 gimple_add_tmp_var (savar); 4245 ctx->cb.decl_map->put (iavar, savar); 4246 *rvar2 = build4 (ARRAY_REF, TREE_TYPE (new_var), savar, 4247 sctx->idx, NULL_TREE, NULL_TREE); 4248 TREE_THIS_NOTRAP (*rvar2) = 1; 4249 } 4250 } 4251 ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar, sctx->idx, 4252 NULL_TREE, NULL_TREE); 4253 lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane, 4254 NULL_TREE, NULL_TREE); 4255 TREE_THIS_NOTRAP (ivar) = 1; 4256 TREE_THIS_NOTRAP (lvar) = 1; 4257 } 4258 if (DECL_P (new_var)) 4259 { 4260 SET_DECL_VALUE_EXPR (new_var, lvar); 4261 DECL_HAS_VALUE_EXPR_P (new_var) = 1; 4262 } 4263 return true; 4264 } 4265 4266 /* Helper function of lower_rec_input_clauses. For a reference 4267 in simd reduction, add an underlying variable it will reference. */ 4268 4269 static void 4270 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist) 4271 { 4272 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard))); 4273 if (TREE_CONSTANT (z)) 4274 { 4275 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)), 4276 get_name (new_vard)); 4277 gimple_add_tmp_var (z); 4278 TREE_ADDRESSABLE (z) = 1; 4279 z = build_fold_addr_expr_loc (loc, z); 4280 gimplify_assign (new_vard, z, ilist); 4281 } 4282 } 4283 4284 /* Helper function for lower_rec_input_clauses. Emit into ilist sequence 4285 code to emit (type) (tskred_temp[idx]). */ 4286 4287 static tree 4288 task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type, 4289 unsigned idx) 4290 { 4291 unsigned HOST_WIDE_INT sz 4292 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node)); 4293 tree r = build2 (MEM_REF, pointer_sized_int_node, 4294 tskred_temp, build_int_cst (TREE_TYPE (tskred_temp), 4295 idx * sz)); 4296 tree v = create_tmp_var (pointer_sized_int_node); 4297 gimple *g = gimple_build_assign (v, r); 4298 gimple_seq_add_stmt (ilist, g); 4299 if (!useless_type_conversion_p (type, pointer_sized_int_node)) 4300 { 4301 v = create_tmp_var (type); 4302 g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g)); 4303 gimple_seq_add_stmt (ilist, g); 4304 } 4305 return v; 4306 } 4307 4308 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN, 4309 from the receiver (aka child) side and initializers for REFERENCE_TYPE 4310 private variables. Initialization statements go in ILIST, while calls 4311 to destructors go in DLIST. 
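As an illustration, for

#pragma omp parallel firstprivate (x)

ILIST receives the statements that copy-construct the private X
from the sender's value, and DLIST the corresponding destructor
call for types that have one.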
*/ 4312 4313 static void 4314 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist, 4315 omp_context *ctx, struct omp_for_data *fd) 4316 { 4317 tree c, copyin_seq, x, ptr; 4318 bool copyin_by_ref = false; 4319 bool lastprivate_firstprivate = false; 4320 bool reduction_omp_orig_ref = false; 4321 int pass; 4322 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR 4323 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD); 4324 omplow_simd_context sctx = omplow_simd_context (); 4325 tree simt_lane = NULL_TREE, simtrec = NULL_TREE; 4326 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE; 4327 gimple_seq llist[4] = { }; 4328 tree nonconst_simd_if = NULL_TREE; 4329 4330 copyin_seq = NULL; 4331 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_); 4332 4333 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops 4334 with data sharing clauses referencing variable sized vars. That 4335 is unnecessarily hard to support and very unlikely to result in 4336 vectorized code anyway. */ 4337 if (is_simd) 4338 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c)) 4339 switch (OMP_CLAUSE_CODE (c)) 4340 { 4341 case OMP_CLAUSE_LINEAR: 4342 if (OMP_CLAUSE_LINEAR_ARRAY (c)) 4343 sctx.max_vf = 1; 4344 /* FALLTHRU */ 4345 case OMP_CLAUSE_PRIVATE: 4346 case OMP_CLAUSE_FIRSTPRIVATE: 4347 case OMP_CLAUSE_LASTPRIVATE: 4348 if (is_variable_sized (OMP_CLAUSE_DECL (c))) 4349 sctx.max_vf = 1; 4350 else if (omp_is_reference (OMP_CLAUSE_DECL (c))) 4351 { 4352 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c))); 4353 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype))) 4354 sctx.max_vf = 1; 4355 } 4356 break; 4357 case OMP_CLAUSE_REDUCTION: 4358 case OMP_CLAUSE_IN_REDUCTION: 4359 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF 4360 || is_variable_sized (OMP_CLAUSE_DECL (c))) 4361 sctx.max_vf = 1; 4362 else if (omp_is_reference (OMP_CLAUSE_DECL (c))) 4363 { 4364 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c))); 4365 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype))) 4366 sctx.max_vf = 1; 4367 } 4368 break; 4369 case OMP_CLAUSE_IF: 4370 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c))) 4371 sctx.max_vf = 1; 4372 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c)) != INTEGER_CST) 4373 nonconst_simd_if = OMP_CLAUSE_IF_EXPR (c); 4374 break; 4375 case OMP_CLAUSE_SIMDLEN: 4376 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c))) 4377 sctx.max_vf = 1; 4378 break; 4379 case OMP_CLAUSE__CONDTEMP_: 4380 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */ 4381 if (sctx.is_simt) 4382 sctx.max_vf = 1; 4383 break; 4384 default: 4385 continue; 4386 } 4387 4388 /* Add a placeholder for simduid. */ 4389 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U)) 4390 sctx.simt_eargs.safe_push (NULL_TREE); 4391 4392 unsigned task_reduction_cnt = 0; 4393 unsigned task_reduction_cntorig = 0; 4394 unsigned task_reduction_cnt_full = 0; 4395 unsigned task_reduction_cntorig_full = 0; 4396 unsigned task_reduction_other_cnt = 0; 4397 tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE; 4398 tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE; 4399 /* Do all the fixed sized types in the first pass, and the variable sized 4400 types in the second pass. This makes sure that the scalar arguments to 4401 the variable sized types are processed before we use them in the 4402 variable sized operations. For task reductions we use 4 passes, in the 4403 first two we ignore them, in the third one gather arguments for 4404 GOMP_task_reduction_remap call and in the last pass actually handle 4405 the task reductions. 
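(TASK_REDUCTION_CNT and TASK_REDUCTION_OTHER_CNT are filled in
while walking the clauses, so the loop bound below only becomes
4 when task reductions are actually present.)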
*/ 4406 for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt) 4407 ? 4 : 2); ++pass) 4408 { 4409 if (pass == 2 && task_reduction_cnt) 4410 { 4411 tskred_atype 4412 = build_array_type_nelts (ptr_type_node, task_reduction_cnt 4413 + task_reduction_cntorig); 4414 tskred_avar = create_tmp_var_raw (tskred_atype); 4415 gimple_add_tmp_var (tskred_avar); 4416 TREE_ADDRESSABLE (tskred_avar) = 1; 4417 task_reduction_cnt_full = task_reduction_cnt; 4418 task_reduction_cntorig_full = task_reduction_cntorig; 4419 } 4420 else if (pass == 3 && task_reduction_cnt) 4421 { 4422 x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP); 4423 gimple *g 4424 = gimple_build_call (x, 3, size_int (task_reduction_cnt), 4425 size_int (task_reduction_cntorig), 4426 build_fold_addr_expr (tskred_avar)); 4427 gimple_seq_add_stmt (ilist, g); 4428 } 4429 if (pass == 3 && task_reduction_other_cnt) 4430 { 4431 /* For reduction clauses, build 4432 tskred_base = (void *) tskred_temp[2] 4433 + omp_get_thread_num () * tskred_temp[1] 4434 or if tskred_temp[1] is known to be constant, that constant 4435 directly. This is the start of the private reduction copy block 4436 for the current thread. */ 4437 tree v = create_tmp_var (integer_type_node); 4438 x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM); 4439 gimple *g = gimple_build_call (x, 0); 4440 gimple_call_set_lhs (g, v); 4441 gimple_seq_add_stmt (ilist, g); 4442 c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_); 4443 tskred_temp = OMP_CLAUSE_DECL (c); 4444 if (is_taskreg_ctx (ctx)) 4445 tskred_temp = lookup_decl (tskred_temp, ctx); 4446 tree v2 = create_tmp_var (sizetype); 4447 g = gimple_build_assign (v2, NOP_EXPR, v); 4448 gimple_seq_add_stmt (ilist, g); 4449 if (ctx->task_reductions[0]) 4450 v = fold_convert (sizetype, ctx->task_reductions[0]); 4451 else 4452 v = task_reduction_read (ilist, tskred_temp, sizetype, 1); 4453 tree v3 = create_tmp_var (sizetype); 4454 g = gimple_build_assign (v3, MULT_EXPR, v2, v); 4455 gimple_seq_add_stmt (ilist, g); 4456 v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2); 4457 tskred_base = create_tmp_var (ptr_type_node); 4458 g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3); 4459 gimple_seq_add_stmt (ilist, g); 4460 } 4461 task_reduction_cnt = 0; 4462 task_reduction_cntorig = 0; 4463 task_reduction_other_cnt = 0; 4464 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c)) 4465 { 4466 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c); 4467 tree var, new_var; 4468 bool by_ref; 4469 location_t clause_loc = OMP_CLAUSE_LOCATION (c); 4470 bool task_reduction_p = false; 4471 bool task_reduction_needs_orig_p = false; 4472 tree cond = NULL_TREE; 4473 4474 switch (c_kind) 4475 { 4476 case OMP_CLAUSE_PRIVATE: 4477 if (OMP_CLAUSE_PRIVATE_DEBUG (c)) 4478 continue; 4479 break; 4480 case OMP_CLAUSE_SHARED: 4481 /* Ignore shared directives in teams construct inside 4482 of target construct. 
*/ 4483 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS 4484 && !is_host_teams_ctx (ctx)) 4485 continue; 4486 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL) 4487 { 4488 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) 4489 || is_global_var (OMP_CLAUSE_DECL (c))); 4490 continue; 4491 } 4492 case OMP_CLAUSE_FIRSTPRIVATE: 4493 case OMP_CLAUSE_COPYIN: 4494 break; 4495 case OMP_CLAUSE_LINEAR: 4496 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c) 4497 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)) 4498 lastprivate_firstprivate = true; 4499 break; 4500 case OMP_CLAUSE_REDUCTION: 4501 case OMP_CLAUSE_IN_REDUCTION: 4502 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c)) 4503 { 4504 task_reduction_p = true; 4505 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION) 4506 { 4507 task_reduction_other_cnt++; 4508 if (pass == 2) 4509 continue; 4510 } 4511 else 4512 task_reduction_cnt++; 4513 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)) 4514 { 4515 var = OMP_CLAUSE_DECL (c); 4516 /* If var is a global variable that isn't privatized 4517 in outer contexts, we don't need to look up the 4518 original address, it is always the address of the 4519 global variable itself. */ 4520 if (!DECL_P (var) 4521 || omp_is_reference (var) 4522 || !is_global_var 4523 (maybe_lookup_decl_in_outer_ctx (var, ctx))) 4524 { 4525 task_reduction_needs_orig_p = true; 4526 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION) 4527 task_reduction_cntorig++; 4528 } 4529 } 4530 } 4531 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)) 4532 reduction_omp_orig_ref = true; 4533 break; 4534 case OMP_CLAUSE__REDUCTEMP_: 4535 if (!is_taskreg_ctx (ctx)) 4536 continue; 4537 /* FALLTHRU */ 4538 case OMP_CLAUSE__LOOPTEMP_: 4539 /* Handle _looptemp_/_reductemp_ clauses only on 4540 parallel/task. */ 4541 if (fd) 4542 continue; 4543 break; 4544 case OMP_CLAUSE_LASTPRIVATE: 4545 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)) 4546 { 4547 lastprivate_firstprivate = true; 4548 if (pass != 0 || is_taskloop_ctx (ctx)) 4549 continue; 4550 } 4551 /* Even without corresponding firstprivate, if 4552 decl is Fortran allocatable, it needs outer var 4553 reference. 
*/ 4554 else if (pass == 0 4555 && lang_hooks.decls.omp_private_outer_ref 4556 (OMP_CLAUSE_DECL (c))) 4557 lastprivate_firstprivate = true; 4558 break; 4559 case OMP_CLAUSE_ALIGNED: 4560 if (pass != 1) 4561 continue; 4562 var = OMP_CLAUSE_DECL (c); 4563 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE 4564 && !is_global_var (var)) 4565 { 4566 new_var = maybe_lookup_decl (var, ctx); 4567 if (new_var == NULL_TREE) 4568 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx); 4569 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED); 4570 tree alarg = omp_clause_aligned_alignment (c); 4571 alarg = fold_convert_loc (clause_loc, size_type_node, alarg); 4572 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg); 4573 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x); 4574 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x); 4575 gimplify_and_add (x, ilist); 4576 } 4577 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE 4578 && is_global_var (var)) 4579 { 4580 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2; 4581 new_var = lookup_decl (var, ctx); 4582 t = maybe_lookup_decl_in_outer_ctx (var, ctx); 4583 t = build_fold_addr_expr_loc (clause_loc, t); 4584 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED); 4585 tree alarg = omp_clause_aligned_alignment (c); 4586 alarg = fold_convert_loc (clause_loc, size_type_node, alarg); 4587 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg); 4588 t = fold_convert_loc (clause_loc, ptype, t); 4589 x = create_tmp_var (ptype); 4590 t = build2 (MODIFY_EXPR, ptype, x, t); 4591 gimplify_and_add (t, ilist); 4592 t = build_simple_mem_ref_loc (clause_loc, x); 4593 SET_DECL_VALUE_EXPR (new_var, t); 4594 DECL_HAS_VALUE_EXPR_P (new_var) = 1; 4595 } 4596 continue; 4597 case OMP_CLAUSE__CONDTEMP_: 4598 if (is_parallel_ctx (ctx) 4599 || (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c))) 4600 break; 4601 continue; 4602 default: 4603 continue; 4604 } 4605 4606 if (task_reduction_p != (pass >= 2)) 4607 continue; 4608 4609 new_var = var = OMP_CLAUSE_DECL (c); 4610 if ((c_kind == OMP_CLAUSE_REDUCTION 4611 || c_kind == OMP_CLAUSE_IN_REDUCTION) 4612 && TREE_CODE (var) == MEM_REF) 4613 { 4614 var = TREE_OPERAND (var, 0); 4615 if (TREE_CODE (var) == POINTER_PLUS_EXPR) 4616 var = TREE_OPERAND (var, 0); 4617 if (TREE_CODE (var) == INDIRECT_REF 4618 || TREE_CODE (var) == ADDR_EXPR) 4619 var = TREE_OPERAND (var, 0); 4620 if (is_variable_sized (var)) 4621 { 4622 gcc_assert (DECL_HAS_VALUE_EXPR_P (var)); 4623 var = DECL_VALUE_EXPR (var); 4624 gcc_assert (TREE_CODE (var) == INDIRECT_REF); 4625 var = TREE_OPERAND (var, 0); 4626 gcc_assert (DECL_P (var)); 4627 } 4628 new_var = var; 4629 } 4630 if (c_kind != OMP_CLAUSE_COPYIN) 4631 new_var = lookup_decl (var, ctx); 4632 4633 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN) 4634 { 4635 if (pass != 0) 4636 continue; 4637 } 4638 /* C/C++ array section reductions. 
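An illustrative example:

#pragma omp parallel reduction (+: a[2:n])

Here OMP_CLAUSE_DECL is a MEM_REF whose second operand is the
bias, i.e. the offset of the section start within the enclosing
object, which is folded into the private copy computations
below.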
*/ 4639 else if ((c_kind == OMP_CLAUSE_REDUCTION 4640 || c_kind == OMP_CLAUSE_IN_REDUCTION) 4641 && var != OMP_CLAUSE_DECL (c)) 4642 { 4643 if (pass == 0) 4644 continue; 4645 4646 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1); 4647 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0); 4648 4649 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR) 4650 { 4651 tree b = TREE_OPERAND (orig_var, 1); 4652 b = maybe_lookup_decl (b, ctx); 4653 if (b == NULL) 4654 { 4655 b = TREE_OPERAND (orig_var, 1); 4656 b = maybe_lookup_decl_in_outer_ctx (b, ctx); 4657 } 4658 if (integer_zerop (bias)) 4659 bias = b; 4660 else 4661 { 4662 bias = fold_convert_loc (clause_loc, 4663 TREE_TYPE (b), bias); 4664 bias = fold_build2_loc (clause_loc, PLUS_EXPR, 4665 TREE_TYPE (b), b, bias); 4666 } 4667 orig_var = TREE_OPERAND (orig_var, 0); 4668 } 4669 if (pass == 2) 4670 { 4671 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx); 4672 if (is_global_var (out) 4673 && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE 4674 && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE 4675 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out))) 4676 != POINTER_TYPE))) 4677 x = var; 4678 else 4679 { 4680 bool by_ref = use_pointer_for_field (var, NULL); 4681 x = build_receiver_ref (var, by_ref, ctx); 4682 if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE 4683 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var))) 4684 == POINTER_TYPE)) 4685 x = build_fold_addr_expr (x); 4686 } 4687 if (TREE_CODE (orig_var) == INDIRECT_REF) 4688 x = build_simple_mem_ref (x); 4689 else if (TREE_CODE (orig_var) == ADDR_EXPR) 4690 { 4691 if (var == TREE_OPERAND (orig_var, 0)) 4692 x = build_fold_addr_expr (x); 4693 } 4694 bias = fold_convert (sizetype, bias); 4695 x = fold_convert (ptr_type_node, x); 4696 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR, 4697 TREE_TYPE (x), x, bias); 4698 unsigned cnt = task_reduction_cnt - 1; 4699 if (!task_reduction_needs_orig_p) 4700 cnt += (task_reduction_cntorig_full 4701 - task_reduction_cntorig); 4702 else 4703 cnt = task_reduction_cntorig - 1; 4704 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar, 4705 size_int (cnt), NULL_TREE, NULL_TREE); 4706 gimplify_assign (r, x, ilist); 4707 continue; 4708 } 4709 4710 if (TREE_CODE (orig_var) == INDIRECT_REF 4711 || TREE_CODE (orig_var) == ADDR_EXPR) 4712 orig_var = TREE_OPERAND (orig_var, 0); 4713 tree d = OMP_CLAUSE_DECL (c); 4714 tree type = TREE_TYPE (d); 4715 gcc_assert (TREE_CODE (type) == ARRAY_TYPE); 4716 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type)); 4717 const char *name = get_name (orig_var); 4718 if (pass == 3) 4719 { 4720 tree xv = create_tmp_var (ptr_type_node); 4721 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION) 4722 { 4723 unsigned cnt = task_reduction_cnt - 1; 4724 if (!task_reduction_needs_orig_p) 4725 cnt += (task_reduction_cntorig_full 4726 - task_reduction_cntorig); 4727 else 4728 cnt = task_reduction_cntorig - 1; 4729 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar, 4730 size_int (cnt), NULL_TREE, NULL_TREE); 4731 4732 gimple *g = gimple_build_assign (xv, x); 4733 gimple_seq_add_stmt (ilist, g); 4734 } 4735 else 4736 { 4737 unsigned int idx = *ctx->task_reduction_map->get (c); 4738 tree off; 4739 if (ctx->task_reductions[1 + idx]) 4740 off = fold_convert (sizetype, 4741 ctx->task_reductions[1 + idx]); 4742 else 4743 off = task_reduction_read (ilist, tskred_temp, sizetype, 4744 7 + 3 * idx + 1); 4745 gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR, 4746 tskred_base, off); 4747 gimple_seq_add_stmt (ilist, g); 4748 } 4749 x = fold_convert (build_pointer_type 
(boolean_type_node), 4750 xv); 4751 if (TREE_CONSTANT (v)) 4752 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, 4753 TYPE_SIZE_UNIT (type)); 4754 else 4755 { 4756 tree t = maybe_lookup_decl (v, ctx); 4757 if (t) 4758 v = t; 4759 else 4760 v = maybe_lookup_decl_in_outer_ctx (v, ctx); 4761 gimplify_expr (&v, ilist, NULL, is_gimple_val, 4762 fb_rvalue); 4763 t = fold_build2_loc (clause_loc, PLUS_EXPR, 4764 TREE_TYPE (v), v, 4765 build_int_cst (TREE_TYPE (v), 1)); 4766 t = fold_build2_loc (clause_loc, MULT_EXPR, 4767 TREE_TYPE (v), t, 4768 TYPE_SIZE_UNIT (TREE_TYPE (type))); 4769 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t); 4770 } 4771 cond = create_tmp_var (TREE_TYPE (x)); 4772 gimplify_assign (cond, x, ilist); 4773 x = xv; 4774 } 4775 else if (TREE_CONSTANT (v)) 4776 { 4777 x = create_tmp_var_raw (type, name); 4778 gimple_add_tmp_var (x); 4779 TREE_ADDRESSABLE (x) = 1; 4780 x = build_fold_addr_expr_loc (clause_loc, x); 4781 } 4782 else 4783 { 4784 tree atmp 4785 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN); 4786 tree t = maybe_lookup_decl (v, ctx); 4787 if (t) 4788 v = t; 4789 else 4790 v = maybe_lookup_decl_in_outer_ctx (v, ctx); 4791 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue); 4792 t = fold_build2_loc (clause_loc, PLUS_EXPR, 4793 TREE_TYPE (v), v, 4794 build_int_cst (TREE_TYPE (v), 1)); 4795 t = fold_build2_loc (clause_loc, MULT_EXPR, 4796 TREE_TYPE (v), t, 4797 TYPE_SIZE_UNIT (TREE_TYPE (type))); 4798 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type))); 4799 x = build_call_expr_loc (clause_loc, atmp, 2, t, al); 4800 } 4801 4802 tree ptype = build_pointer_type (TREE_TYPE (type)); 4803 x = fold_convert_loc (clause_loc, ptype, x); 4804 tree y = create_tmp_var (ptype, name); 4805 gimplify_assign (y, x, ilist); 4806 x = y; 4807 tree yb = y; 4808 4809 if (!integer_zerop (bias)) 4810 { 4811 bias = fold_convert_loc (clause_loc, pointer_sized_int_node, 4812 bias); 4813 yb = fold_convert_loc (clause_loc, pointer_sized_int_node, 4814 x); 4815 yb = fold_build2_loc (clause_loc, MINUS_EXPR, 4816 pointer_sized_int_node, yb, bias); 4817 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb); 4818 yb = create_tmp_var (ptype, name); 4819 gimplify_assign (yb, x, ilist); 4820 x = yb; 4821 } 4822 4823 d = TREE_OPERAND (d, 0); 4824 if (TREE_CODE (d) == POINTER_PLUS_EXPR) 4825 d = TREE_OPERAND (d, 0); 4826 if (TREE_CODE (d) == ADDR_EXPR) 4827 { 4828 if (orig_var != var) 4829 { 4830 gcc_assert (is_variable_sized (orig_var)); 4831 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), 4832 x); 4833 gimplify_assign (new_var, x, ilist); 4834 tree new_orig_var = lookup_decl (orig_var, ctx); 4835 tree t = build_fold_indirect_ref (new_var); 4836 DECL_IGNORED_P (new_var) = 0; 4837 TREE_THIS_NOTRAP (t) = 1; 4838 SET_DECL_VALUE_EXPR (new_orig_var, t); 4839 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1; 4840 } 4841 else 4842 { 4843 x = build2 (MEM_REF, TREE_TYPE (new_var), x, 4844 build_int_cst (ptype, 0)); 4845 SET_DECL_VALUE_EXPR (new_var, x); 4846 DECL_HAS_VALUE_EXPR_P (new_var) = 1; 4847 } 4848 } 4849 else 4850 { 4851 gcc_assert (orig_var == var); 4852 if (TREE_CODE (d) == INDIRECT_REF) 4853 { 4854 x = create_tmp_var (ptype, name); 4855 TREE_ADDRESSABLE (x) = 1; 4856 gimplify_assign (x, yb, ilist); 4857 x = build_fold_addr_expr_loc (clause_loc, x); 4858 } 4859 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x); 4860 gimplify_assign (new_var, x, ilist); 4861 } 4862 /* GOMP_taskgroup_reduction_register memsets the whole 4863 array to zero. 
If the initializer is zero, we don't 4864 need to initialize it again, just mark it as ever 4865 used unconditionally, i.e. cond = true. */ 4866 if (cond 4867 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE 4868 && initializer_zerop (omp_reduction_init (c, 4869 TREE_TYPE (type)))) 4870 { 4871 gimple *g = gimple_build_assign (build_simple_mem_ref (cond), 4872 boolean_true_node); 4873 gimple_seq_add_stmt (ilist, g); 4874 continue; 4875 } 4876 tree end = create_artificial_label (UNKNOWN_LOCATION); 4877 if (cond) 4878 { 4879 gimple *g; 4880 if (!is_parallel_ctx (ctx)) 4881 { 4882 tree condv = create_tmp_var (boolean_type_node); 4883 g = gimple_build_assign (condv, 4884 build_simple_mem_ref (cond)); 4885 gimple_seq_add_stmt (ilist, g); 4886 tree lab1 = create_artificial_label (UNKNOWN_LOCATION); 4887 g = gimple_build_cond (NE_EXPR, condv, 4888 boolean_false_node, end, lab1); 4889 gimple_seq_add_stmt (ilist, g); 4890 gimple_seq_add_stmt (ilist, gimple_build_label (lab1)); 4891 } 4892 g = gimple_build_assign (build_simple_mem_ref (cond), 4893 boolean_true_node); 4894 gimple_seq_add_stmt (ilist, g); 4895 } 4896 4897 tree y1 = create_tmp_var (ptype); 4898 gimplify_assign (y1, y, ilist); 4899 tree i2 = NULL_TREE, y2 = NULL_TREE; 4900 tree body2 = NULL_TREE, end2 = NULL_TREE; 4901 tree y3 = NULL_TREE, y4 = NULL_TREE; 4902 if (task_reduction_needs_orig_p) 4903 { 4904 y3 = create_tmp_var (ptype); 4905 tree ref; 4906 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION) 4907 ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar, 4908 size_int (task_reduction_cnt_full 4909 + task_reduction_cntorig - 1), 4910 NULL_TREE, NULL_TREE); 4911 else 4912 { 4913 unsigned int idx = *ctx->task_reduction_map->get (c); 4914 ref = task_reduction_read (ilist, tskred_temp, ptype, 4915 7 + 3 * idx); 4916 } 4917 gimplify_assign (y3, ref, ilist); 4918 } 4919 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd) 4920 { 4921 if (pass != 3) 4922 { 4923 y2 = create_tmp_var (ptype); 4924 gimplify_assign (y2, y, ilist); 4925 } 4926 if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)) 4927 { 4928 tree ref = build_outer_var_ref (var, ctx); 4929 /* For ref build_outer_var_ref already performs this. 
*/ 4930 if (TREE_CODE (d) == INDIRECT_REF) 4931 gcc_assert (omp_is_reference (var)); 4932 else if (TREE_CODE (d) == ADDR_EXPR) 4933 ref = build_fold_addr_expr (ref); 4934 else if (omp_is_reference (var)) 4935 ref = build_fold_addr_expr (ref); 4936 ref = fold_convert_loc (clause_loc, ptype, ref); 4937 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) 4938 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)) 4939 { 4940 y3 = create_tmp_var (ptype); 4941 gimplify_assign (y3, unshare_expr (ref), ilist); 4942 } 4943 if (is_simd) 4944 { 4945 y4 = create_tmp_var (ptype); 4946 gimplify_assign (y4, ref, dlist); 4947 } 4948 } 4949 } 4950 tree i = create_tmp_var (TREE_TYPE (v)); 4951 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist); 4952 tree body = create_artificial_label (UNKNOWN_LOCATION); 4953 gimple_seq_add_stmt (ilist, gimple_build_label (body)); 4954 if (y2) 4955 { 4956 i2 = create_tmp_var (TREE_TYPE (v)); 4957 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist); 4958 body2 = create_artificial_label (UNKNOWN_LOCATION); 4959 end2 = create_artificial_label (UNKNOWN_LOCATION); 4960 gimple_seq_add_stmt (dlist, gimple_build_label (body2)); 4961 } 4962 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)) 4963 { 4964 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c); 4965 tree decl_placeholder 4966 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c); 4967 SET_DECL_VALUE_EXPR (decl_placeholder, 4968 build_simple_mem_ref (y1)); 4969 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1; 4970 SET_DECL_VALUE_EXPR (placeholder, 4971 y3 ? build_simple_mem_ref (y3) 4972 : error_mark_node); 4973 DECL_HAS_VALUE_EXPR_P (placeholder) = 1; 4974 x = lang_hooks.decls.omp_clause_default_ctor 4975 (c, build_simple_mem_ref (y1), 4976 y3 ? build_simple_mem_ref (y3) : NULL_TREE); 4977 if (x) 4978 gimplify_and_add (x, ilist); 4979 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)) 4980 { 4981 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c); 4982 lower_omp (&tseq, ctx); 4983 gimple_seq_add_seq (ilist, tseq); 4984 } 4985 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL; 4986 if (is_simd) 4987 { 4988 SET_DECL_VALUE_EXPR (decl_placeholder, 4989 build_simple_mem_ref (y2)); 4990 SET_DECL_VALUE_EXPR (placeholder, 4991 build_simple_mem_ref (y4)); 4992 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c); 4993 lower_omp (&tseq, ctx); 4994 gimple_seq_add_seq (dlist, tseq); 4995 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL; 4996 } 4997 DECL_HAS_VALUE_EXPR_P (placeholder) = 0; 4998 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0; 4999 if (y2) 5000 { 5001 x = lang_hooks.decls.omp_clause_dtor 5002 (c, build_simple_mem_ref (y2)); 5003 if (x) 5004 gimplify_and_add (x, dlist); 5005 } 5006 } 5007 else 5008 { 5009 x = omp_reduction_init (c, TREE_TYPE (type)); 5010 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c); 5011 5012 /* reduction(-:var) sums up the partial results, so it 5013 acts identically to reduction(+:var). 
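(Each thread's partial result already carries the sign of the
values it subtracted, so combining the partial results is a
plain addition.)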
*/ 5014 if (code == MINUS_EXPR) 5015 code = PLUS_EXPR; 5016 5017 gimplify_assign (build_simple_mem_ref (y1), x, ilist); 5018 if (is_simd) 5019 { 5020 x = build2 (code, TREE_TYPE (type), 5021 build_simple_mem_ref (y4), 5022 build_simple_mem_ref (y2)); 5023 gimplify_assign (build_simple_mem_ref (y4), x, dlist); 5024 } 5025 } 5026 gimple *g 5027 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1, 5028 TYPE_SIZE_UNIT (TREE_TYPE (type))); 5029 gimple_seq_add_stmt (ilist, g); 5030 if (y3) 5031 { 5032 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3, 5033 TYPE_SIZE_UNIT (TREE_TYPE (type))); 5034 gimple_seq_add_stmt (ilist, g); 5035 } 5036 g = gimple_build_assign (i, PLUS_EXPR, i, 5037 build_int_cst (TREE_TYPE (i), 1)); 5038 gimple_seq_add_stmt (ilist, g); 5039 g = gimple_build_cond (LE_EXPR, i, v, body, end); 5040 gimple_seq_add_stmt (ilist, g); 5041 gimple_seq_add_stmt (ilist, gimple_build_label (end)); 5042 if (y2) 5043 { 5044 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2, 5045 TYPE_SIZE_UNIT (TREE_TYPE (type))); 5046 gimple_seq_add_stmt (dlist, g); 5047 if (y4) 5048 { 5049 g = gimple_build_assign 5050 (y4, POINTER_PLUS_EXPR, y4, 5051 TYPE_SIZE_UNIT (TREE_TYPE (type))); 5052 gimple_seq_add_stmt (dlist, g); 5053 } 5054 g = gimple_build_assign (i2, PLUS_EXPR, i2, 5055 build_int_cst (TREE_TYPE (i2), 1)); 5056 gimple_seq_add_stmt (dlist, g); 5057 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2); 5058 gimple_seq_add_stmt (dlist, g); 5059 gimple_seq_add_stmt (dlist, gimple_build_label (end2)); 5060 } 5061 continue; 5062 } 5063 else if (pass == 2) 5064 { 5065 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))) 5066 x = var; 5067 else 5068 { 5069 bool by_ref = use_pointer_for_field (var, ctx); 5070 x = build_receiver_ref (var, by_ref, ctx); 5071 } 5072 if (!omp_is_reference (var)) 5073 x = build_fold_addr_expr (x); 5074 x = fold_convert (ptr_type_node, x); 5075 unsigned cnt = task_reduction_cnt - 1; 5076 if (!task_reduction_needs_orig_p) 5077 cnt += task_reduction_cntorig_full - task_reduction_cntorig; 5078 else 5079 cnt = task_reduction_cntorig - 1; 5080 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar, 5081 size_int (cnt), NULL_TREE, NULL_TREE); 5082 gimplify_assign (r, x, ilist); 5083 continue; 5084 } 5085 else if (pass == 3) 5086 { 5087 tree type = TREE_TYPE (new_var); 5088 if (!omp_is_reference (var)) 5089 type = build_pointer_type (type); 5090 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION) 5091 { 5092 unsigned cnt = task_reduction_cnt - 1; 5093 if (!task_reduction_needs_orig_p) 5094 cnt += (task_reduction_cntorig_full 5095 - task_reduction_cntorig); 5096 else 5097 cnt = task_reduction_cntorig - 1; 5098 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar, 5099 size_int (cnt), NULL_TREE, NULL_TREE); 5100 } 5101 else 5102 { 5103 unsigned int idx = *ctx->task_reduction_map->get (c); 5104 tree off; 5105 if (ctx->task_reductions[1 + idx]) 5106 off = fold_convert (sizetype, 5107 ctx->task_reductions[1 + idx]); 5108 else 5109 off = task_reduction_read (ilist, tskred_temp, sizetype, 5110 7 + 3 * idx + 1); 5111 x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, 5112 tskred_base, off); 5113 } 5114 x = fold_convert (type, x); 5115 tree t; 5116 if (omp_is_reference (var)) 5117 { 5118 gimplify_assign (new_var, x, ilist); 5119 t = new_var; 5120 new_var = build_simple_mem_ref (new_var); 5121 } 5122 else 5123 { 5124 t = create_tmp_var (type); 5125 gimplify_assign (t, x, ilist); 5126 SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t)); 5127 DECL_HAS_VALUE_EXPR_P (new_var) = 1; 5128 } 
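/* COND points at the boolean flag stored just past this
thread's privatized copy (the same layout as in the pass 3
array section handling above); it records whether the copy
has been initialized. */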
5129 t = fold_convert (build_pointer_type (boolean_type_node), t);
5130 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
5131 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5132 cond = create_tmp_var (TREE_TYPE (t));
5133 gimplify_assign (cond, t, ilist);
5134 }
5135 else if (is_variable_sized (var))
5136 {
5137 /* For variable sized types, we need to allocate the
5138 actual storage here. Call alloca and store the
5139 result in the pointer decl that we created elsewhere. */
5140 if (pass == 0)
5141 continue;
5142
5143 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
5144 {
5145 gcall *stmt;
5146 tree tmp, atmp;
5147
5148 ptr = DECL_VALUE_EXPR (new_var);
5149 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
5150 ptr = TREE_OPERAND (ptr, 0);
5151 gcc_assert (DECL_P (ptr));
5152 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
5153
5154 /* void *tmp = __builtin_alloca */
5155 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5156 stmt = gimple_build_call (atmp, 2, x,
5157 size_int (DECL_ALIGN (var)));
5158 cfun->calls_alloca = 1;
5159 tmp = create_tmp_var_raw (ptr_type_node);
5160 gimple_add_tmp_var (tmp);
5161 gimple_call_set_lhs (stmt, tmp);
5162
5163 gimple_seq_add_stmt (ilist, stmt);
5164
5165 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
5166 gimplify_assign (ptr, x, ilist);
5167 }
5168 }
5169 else if (omp_is_reference (var)
5170 && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
5171 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
5172 {
5173 /* For references that are being privatized for Fortran,
5174 allocate new backing storage for the new pointer
5175 variable. This allows us to avoid changing all the
5176 code that expects a pointer into code that expects
5177 a direct variable. */
5178 if (pass == 0)
5179 continue;
5180
5181 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
5182 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
5183 {
5184 x = build_receiver_ref (var, false, ctx);
5185 x = build_fold_addr_expr_loc (clause_loc, x);
5186 }
5187 else if (TREE_CONSTANT (x))
5188 {
5189 /* For reduction in SIMD loop, defer adding the
5190 initialization of the reference, because if we decide
5191 to use SIMD array for it, the initialization could cause
5192 an expansion ICE. Ditto for other privatization clauses. */
5193 if (is_simd)
5194 x = NULL_TREE;
5195 else
5196 {
5197 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
5198 get_name (var));
5199 gimple_add_tmp_var (x);
5200 TREE_ADDRESSABLE (x) = 1;
5201 x = build_fold_addr_expr_loc (clause_loc, x);
5202 }
5203 }
5204 else
5205 {
5206 tree atmp
5207 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5208 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
5209 tree al = size_int (TYPE_ALIGN (rtype));
5210 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
5211 }
5212
5213 if (x)
5214 {
5215 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5216 gimplify_assign (new_var, x, ilist);
5217 }
5218
5219 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5220 }
5221 else if ((c_kind == OMP_CLAUSE_REDUCTION
5222 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5223 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5224 {
5225 if (pass == 0)
5226 continue;
5227 }
5228 else if (pass != 0)
5229 continue;
5230
5231 switch (OMP_CLAUSE_CODE (c))
5232 {
5233 case OMP_CLAUSE_SHARED:
5234 /* Ignore shared directives in teams construct inside
5235 target construct. */
5236 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
5237 && !is_host_teams_ctx (ctx))
5238 continue;
5239 /* Shared global vars are just accessed directly.
*/ 5240 if (is_global_var (new_var)) 5241 break; 5242 /* For taskloop firstprivate/lastprivate, represented 5243 as firstprivate and shared clause on the task, new_var 5244 is the firstprivate var. */ 5245 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)) 5246 break; 5247 /* Set up the DECL_VALUE_EXPR for shared variables now. This 5248 needs to be delayed until after fixup_child_record_type so 5249 that we get the correct type during the dereference. */ 5250 by_ref = use_pointer_for_field (var, ctx); 5251 x = build_receiver_ref (var, by_ref, ctx); 5252 SET_DECL_VALUE_EXPR (new_var, x); 5253 DECL_HAS_VALUE_EXPR_P (new_var) = 1; 5254 5255 /* ??? If VAR is not passed by reference, and the variable 5256 hasn't been initialized yet, then we'll get a warning for 5257 the store into the omp_data_s structure. Ideally, we'd be 5258 able to notice this and not store anything at all, but 5259 we're generating code too early. Suppress the warning. */ 5260 if (!by_ref) 5261 TREE_NO_WARNING (var) = 1; 5262 break; 5263 5264 case OMP_CLAUSE__CONDTEMP_: 5265 if (is_parallel_ctx (ctx)) 5266 { 5267 x = build_receiver_ref (var, false, ctx); 5268 SET_DECL_VALUE_EXPR (new_var, x); 5269 DECL_HAS_VALUE_EXPR_P (new_var) = 1; 5270 } 5271 else if (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c)) 5272 { 5273 x = build_zero_cst (TREE_TYPE (var)); 5274 goto do_private; 5275 } 5276 break; 5277 5278 case OMP_CLAUSE_LASTPRIVATE: 5279 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)) 5280 break; 5281 /* FALLTHRU */ 5282 5283 case OMP_CLAUSE_PRIVATE: 5284 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE) 5285 x = build_outer_var_ref (var, ctx); 5286 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c)) 5287 { 5288 if (is_task_ctx (ctx)) 5289 x = build_receiver_ref (var, false, ctx); 5290 else 5291 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE); 5292 } 5293 else 5294 x = NULL; 5295 do_private: 5296 tree nx; 5297 bool copy_ctor; 5298 copy_ctor = false; 5299 nx = unshare_expr (new_var); 5300 if (is_simd 5301 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE 5302 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)) 5303 copy_ctor = true; 5304 if (copy_ctor) 5305 nx = lang_hooks.decls.omp_clause_copy_ctor (c, nx, x); 5306 else 5307 nx = lang_hooks.decls.omp_clause_default_ctor (c, nx, x); 5308 if (is_simd) 5309 { 5310 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var); 5311 if ((TREE_ADDRESSABLE (new_var) || nx || y 5312 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE 5313 && (gimple_omp_for_collapse (ctx->stmt) != 1 5314 || (gimple_omp_for_index (ctx->stmt, 0) 5315 != new_var))) 5316 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_ 5317 || omp_is_reference (var)) 5318 && lower_rec_simd_input_clauses (new_var, ctx, &sctx, 5319 ivar, lvar)) 5320 { 5321 if (omp_is_reference (var)) 5322 { 5323 gcc_assert (TREE_CODE (new_var) == MEM_REF); 5324 tree new_vard = TREE_OPERAND (new_var, 0); 5325 gcc_assert (DECL_P (new_vard)); 5326 SET_DECL_VALUE_EXPR (new_vard, 5327 build_fold_addr_expr (lvar)); 5328 DECL_HAS_VALUE_EXPR_P (new_vard) = 1; 5329 } 5330 5331 if (nx) 5332 { 5333 tree iv = unshare_expr (ivar); 5334 if (copy_ctor) 5335 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, 5336 x); 5337 else 5338 x = lang_hooks.decls.omp_clause_default_ctor (c, 5339 iv, 5340 x); 5341 } 5342 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_) 5343 { 5344 x = build2 (MODIFY_EXPR, TREE_TYPE (ivar), 5345 unshare_expr (ivar), x); 5346 nx = x; 5347 } 5348 if (nx && x) 5349 gimplify_and_add (x, &llist[0]); 5350 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE 5351 && 
OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)) 5352 { 5353 tree v = new_var; 5354 if (!DECL_P (v)) 5355 { 5356 gcc_assert (TREE_CODE (v) == MEM_REF); 5357 v = TREE_OPERAND (v, 0); 5358 gcc_assert (DECL_P (v)); 5359 } 5360 v = *ctx->lastprivate_conditional_map->get (v); 5361 tree t = create_tmp_var (TREE_TYPE (v)); 5362 tree z = build_zero_cst (TREE_TYPE (v)); 5363 tree orig_v 5364 = build_outer_var_ref (var, ctx, 5365 OMP_CLAUSE_LASTPRIVATE); 5366 gimple_seq_add_stmt (dlist, 5367 gimple_build_assign (t, z)); 5368 gcc_assert (DECL_HAS_VALUE_EXPR_P (v)); 5369 tree civar = DECL_VALUE_EXPR (v); 5370 gcc_assert (TREE_CODE (civar) == ARRAY_REF); 5371 civar = unshare_expr (civar); 5372 TREE_OPERAND (civar, 1) = sctx.idx; 5373 x = build2 (MODIFY_EXPR, TREE_TYPE (t), t, 5374 unshare_expr (civar)); 5375 x = build2 (COMPOUND_EXPR, TREE_TYPE (orig_v), x, 5376 build2 (MODIFY_EXPR, TREE_TYPE (orig_v), 5377 orig_v, unshare_expr (ivar))); 5378 tree cond = build2 (LT_EXPR, boolean_type_node, t, 5379 civar); 5380 x = build3 (COND_EXPR, void_type_node, cond, x, 5381 void_node); 5382 gimple_seq tseq = NULL; 5383 gimplify_and_add (x, &tseq); 5384 if (ctx->outer) 5385 lower_omp (&tseq, ctx->outer); 5386 gimple_seq_add_seq (&llist[1], tseq); 5387 } 5388 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE 5389 && ctx->for_simd_scan_phase) 5390 { 5391 x = unshare_expr (ivar); 5392 tree orig_v 5393 = build_outer_var_ref (var, ctx, 5394 OMP_CLAUSE_LASTPRIVATE); 5395 x = lang_hooks.decls.omp_clause_assign_op (c, x, 5396 orig_v); 5397 gimplify_and_add (x, &llist[0]); 5398 } 5399 if (y) 5400 { 5401 y = lang_hooks.decls.omp_clause_dtor (c, ivar); 5402 if (y) 5403 gimplify_and_add (y, &llist[1]); 5404 } 5405 break; 5406 } 5407 if (omp_is_reference (var)) 5408 { 5409 gcc_assert (TREE_CODE (new_var) == MEM_REF); 5410 tree new_vard = TREE_OPERAND (new_var, 0); 5411 gcc_assert (DECL_P (new_vard)); 5412 tree type = TREE_TYPE (TREE_TYPE (new_vard)); 5413 x = TYPE_SIZE_UNIT (type); 5414 if (TREE_CONSTANT (x)) 5415 { 5416 x = create_tmp_var_raw (type, get_name (var)); 5417 gimple_add_tmp_var (x); 5418 TREE_ADDRESSABLE (x) = 1; 5419 x = build_fold_addr_expr_loc (clause_loc, x); 5420 x = fold_convert_loc (clause_loc, 5421 TREE_TYPE (new_vard), x); 5422 gimplify_assign (new_vard, x, ilist); 5423 } 5424 } 5425 } 5426 if (nx) 5427 gimplify_and_add (nx, ilist); 5428 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE 5429 && is_simd 5430 && ctx->for_simd_scan_phase) 5431 { 5432 tree orig_v = build_outer_var_ref (var, ctx, 5433 OMP_CLAUSE_LASTPRIVATE); 5434 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, 5435 orig_v); 5436 gimplify_and_add (x, ilist); 5437 } 5438 /* FALLTHRU */ 5439 5440 do_dtor: 5441 x = lang_hooks.decls.omp_clause_dtor (c, new_var); 5442 if (x) 5443 gimplify_and_add (x, dlist); 5444 break; 5445 5446 case OMP_CLAUSE_LINEAR: 5447 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)) 5448 goto do_firstprivate; 5449 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c)) 5450 x = NULL; 5451 else 5452 x = build_outer_var_ref (var, ctx); 5453 goto do_private; 5454 5455 case OMP_CLAUSE_FIRSTPRIVATE: 5456 if (is_task_ctx (ctx)) 5457 { 5458 if ((omp_is_reference (var) 5459 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)) 5460 || is_variable_sized (var)) 5461 goto do_dtor; 5462 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, 5463 ctx)) 5464 || use_pointer_for_field (var, NULL)) 5465 { 5466 x = build_receiver_ref (var, false, ctx); 5467 SET_DECL_VALUE_EXPR (new_var, x); 5468 DECL_HAS_VALUE_EXPR_P (new_var) = 1; 5469 goto do_dtor; 5470 } 5471 } 5472 if 
(OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) 5473 && omp_is_reference (var)) 5474 { 5475 x = build_outer_var_ref (var, ctx); 5476 gcc_assert (TREE_CODE (x) == MEM_REF 5477 && integer_zerop (TREE_OPERAND (x, 1))); 5478 x = TREE_OPERAND (x, 0); 5479 x = lang_hooks.decls.omp_clause_copy_ctor 5480 (c, unshare_expr (new_var), x); 5481 gimplify_and_add (x, ilist); 5482 goto do_dtor; 5483 } 5484 do_firstprivate: 5485 x = build_outer_var_ref (var, ctx); 5486 if (is_simd) 5487 { 5488 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR 5489 && gimple_omp_for_combined_into_p (ctx->stmt)) 5490 { 5491 tree t = OMP_CLAUSE_LINEAR_STEP (c); 5492 tree stept = TREE_TYPE (t); 5493 tree ct = omp_find_clause (clauses, 5494 OMP_CLAUSE__LOOPTEMP_); 5495 gcc_assert (ct); 5496 tree l = OMP_CLAUSE_DECL (ct); 5497 tree n1 = fd->loop.n1; 5498 tree step = fd->loop.step; 5499 tree itype = TREE_TYPE (l); 5500 if (POINTER_TYPE_P (itype)) 5501 itype = signed_type_for (itype); 5502 l = fold_build2 (MINUS_EXPR, itype, l, n1); 5503 if (TYPE_UNSIGNED (itype) 5504 && fd->loop.cond_code == GT_EXPR) 5505 l = fold_build2 (TRUNC_DIV_EXPR, itype, 5506 fold_build1 (NEGATE_EXPR, itype, l), 5507 fold_build1 (NEGATE_EXPR, 5508 itype, step)); 5509 else 5510 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step); 5511 t = fold_build2 (MULT_EXPR, stept, 5512 fold_convert (stept, l), t); 5513 5514 if (OMP_CLAUSE_LINEAR_ARRAY (c)) 5515 { 5516 if (omp_is_reference (var)) 5517 { 5518 gcc_assert (TREE_CODE (new_var) == MEM_REF); 5519 tree new_vard = TREE_OPERAND (new_var, 0); 5520 gcc_assert (DECL_P (new_vard)); 5521 tree type = TREE_TYPE (TREE_TYPE (new_vard)); 5522 nx = TYPE_SIZE_UNIT (type); 5523 if (TREE_CONSTANT (nx)) 5524 { 5525 nx = create_tmp_var_raw (type, 5526 get_name (var)); 5527 gimple_add_tmp_var (nx); 5528 TREE_ADDRESSABLE (nx) = 1; 5529 nx = build_fold_addr_expr_loc (clause_loc, 5530 nx); 5531 nx = fold_convert_loc (clause_loc, 5532 TREE_TYPE (new_vard), 5533 nx); 5534 gimplify_assign (new_vard, nx, ilist); 5535 } 5536 } 5537 5538 x = lang_hooks.decls.omp_clause_linear_ctor 5539 (c, new_var, x, t); 5540 gimplify_and_add (x, ilist); 5541 goto do_dtor; 5542 } 5543 5544 if (POINTER_TYPE_P (TREE_TYPE (x))) 5545 x = fold_build2 (POINTER_PLUS_EXPR, 5546 TREE_TYPE (x), x, t); 5547 else 5548 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t); 5549 } 5550 5551 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR 5552 || TREE_ADDRESSABLE (new_var) 5553 || omp_is_reference (var)) 5554 && lower_rec_simd_input_clauses (new_var, ctx, &sctx, 5555 ivar, lvar)) 5556 { 5557 if (omp_is_reference (var)) 5558 { 5559 gcc_assert (TREE_CODE (new_var) == MEM_REF); 5560 tree new_vard = TREE_OPERAND (new_var, 0); 5561 gcc_assert (DECL_P (new_vard)); 5562 SET_DECL_VALUE_EXPR (new_vard, 5563 build_fold_addr_expr (lvar)); 5564 DECL_HAS_VALUE_EXPR_P (new_vard) = 1; 5565 } 5566 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR) 5567 { 5568 tree iv = create_tmp_var (TREE_TYPE (new_var)); 5569 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x); 5570 gimplify_and_add (x, ilist); 5571 gimple_stmt_iterator gsi 5572 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt)); 5573 gassign *g 5574 = gimple_build_assign (unshare_expr (lvar), iv); 5575 gsi_insert_before_without_update (&gsi, g, 5576 GSI_SAME_STMT); 5577 tree t = OMP_CLAUSE_LINEAR_STEP (c); 5578 enum tree_code code = PLUS_EXPR; 5579 if (POINTER_TYPE_P (TREE_TYPE (new_var))) 5580 code = POINTER_PLUS_EXPR; 5581 g = gimple_build_assign (iv, code, iv, t); 5582 gsi_insert_before_without_update (&gsi, g, 5583 GSI_SAME_STMT); 5584 break; 5585 } 
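/* An illustrative sketch of the OMP_CLAUSE_LINEAR handling just above, e.g. for #pragma omp simd linear(x:step): a helper iterator IV is copy-constructed from the outer X in ILIST, and the loop body is prefixed with
     lvar = iv;
     iv = iv + step;   (POINTER_PLUS_EXPR for pointer types)
   so each iteration's per-lane copy observes the value X would have had at that point of a sequential execution.  */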
5586 x = lang_hooks.decls.omp_clause_copy_ctor 5587 (c, unshare_expr (ivar), x); 5588 gimplify_and_add (x, &llist[0]); 5589 x = lang_hooks.decls.omp_clause_dtor (c, ivar); 5590 if (x) 5591 gimplify_and_add (x, &llist[1]); 5592 break; 5593 } 5594 if (omp_is_reference (var)) 5595 { 5596 gcc_assert (TREE_CODE (new_var) == MEM_REF); 5597 tree new_vard = TREE_OPERAND (new_var, 0); 5598 gcc_assert (DECL_P (new_vard)); 5599 tree type = TREE_TYPE (TREE_TYPE (new_vard)); 5600 nx = TYPE_SIZE_UNIT (type); 5601 if (TREE_CONSTANT (nx)) 5602 { 5603 nx = create_tmp_var_raw (type, get_name (var)); 5604 gimple_add_tmp_var (nx); 5605 TREE_ADDRESSABLE (nx) = 1; 5606 nx = build_fold_addr_expr_loc (clause_loc, nx); 5607 nx = fold_convert_loc (clause_loc, 5608 TREE_TYPE (new_vard), nx); 5609 gimplify_assign (new_vard, nx, ilist); 5610 } 5611 } 5612 } 5613 x = lang_hooks.decls.omp_clause_copy_ctor 5614 (c, unshare_expr (new_var), x); 5615 gimplify_and_add (x, ilist); 5616 goto do_dtor; 5617 5618 case OMP_CLAUSE__LOOPTEMP_: 5619 case OMP_CLAUSE__REDUCTEMP_: 5620 gcc_assert (is_taskreg_ctx (ctx)); 5621 x = build_outer_var_ref (var, ctx); 5622 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x); 5623 gimplify_and_add (x, ilist); 5624 break; 5625 5626 case OMP_CLAUSE_COPYIN: 5627 by_ref = use_pointer_for_field (var, NULL); 5628 x = build_receiver_ref (var, by_ref, ctx); 5629 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x); 5630 append_to_statement_list (x, &copyin_seq); 5631 copyin_by_ref |= by_ref; 5632 break; 5633 5634 case OMP_CLAUSE_REDUCTION: 5635 case OMP_CLAUSE_IN_REDUCTION: 5636 /* OpenACC reductions are initialized using the 5637 GOACC_REDUCTION internal function. */ 5638 if (is_gimple_omp_oacc (ctx->stmt)) 5639 break; 5640 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)) 5641 { 5642 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c); 5643 gimple *tseq; 5644 tree ptype = TREE_TYPE (placeholder); 5645 if (cond) 5646 { 5647 x = error_mark_node; 5648 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c) 5649 && !task_reduction_needs_orig_p) 5650 x = var; 5651 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)) 5652 { 5653 tree pptype = build_pointer_type (ptype); 5654 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION) 5655 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar, 5656 size_int (task_reduction_cnt_full 5657 + task_reduction_cntorig - 1), 5658 NULL_TREE, NULL_TREE); 5659 else 5660 { 5661 unsigned int idx 5662 = *ctx->task_reduction_map->get (c); 5663 x = task_reduction_read (ilist, tskred_temp, 5664 pptype, 7 + 3 * idx); 5665 } 5666 x = fold_convert (pptype, x); 5667 x = build_simple_mem_ref (x); 5668 } 5669 } 5670 else 5671 { 5672 x = build_outer_var_ref (var, ctx); 5673 5674 if (omp_is_reference (var) 5675 && !useless_type_conversion_p (ptype, TREE_TYPE (x))) 5676 x = build_fold_addr_expr_loc (clause_loc, x); 5677 } 5678 SET_DECL_VALUE_EXPR (placeholder, x); 5679 DECL_HAS_VALUE_EXPR_P (placeholder) = 1; 5680 tree new_vard = new_var; 5681 if (omp_is_reference (var)) 5682 { 5683 gcc_assert (TREE_CODE (new_var) == MEM_REF); 5684 new_vard = TREE_OPERAND (new_var, 0); 5685 gcc_assert (DECL_P (new_vard)); 5686 } 5687 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE; 5688 if (is_simd 5689 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION 5690 && OMP_CLAUSE_REDUCTION_INSCAN (c)) 5691 rvarp = &rvar; 5692 if (is_simd 5693 && lower_rec_simd_input_clauses (new_var, ctx, &sctx, 5694 ivar, lvar, rvarp, 5695 &rvar2)) 5696 { 5697 if (new_vard == new_var) 5698 { 5699 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar); 5700
SET_DECL_VALUE_EXPR (new_var, ivar); 5701 } 5702 else 5703 { 5704 SET_DECL_VALUE_EXPR (new_vard, 5705 build_fold_addr_expr (ivar)); 5706 DECL_HAS_VALUE_EXPR_P (new_vard) = 1; 5707 } 5708 x = lang_hooks.decls.omp_clause_default_ctor 5709 (c, unshare_expr (ivar), 5710 build_outer_var_ref (var, ctx)); 5711 if (rvarp && ctx->for_simd_scan_phase) 5712 { 5713 if (x) 5714 gimplify_and_add (x, &llist[0]); 5715 x = lang_hooks.decls.omp_clause_dtor (c, ivar); 5716 if (x) 5717 gimplify_and_add (x, &llist[1]); 5718 break; 5719 } 5720 else if (rvarp) 5721 { 5722 if (x) 5723 { 5724 gimplify_and_add (x, &llist[0]); 5725 5726 tree ivar2 = unshare_expr (lvar); 5727 TREE_OPERAND (ivar2, 1) = sctx.idx; 5728 x = lang_hooks.decls.omp_clause_default_ctor 5729 (c, ivar2, build_outer_var_ref (var, ctx)); 5730 gimplify_and_add (x, &llist[0]); 5731 5732 if (rvar2) 5733 { 5734 x = lang_hooks.decls.omp_clause_default_ctor 5735 (c, unshare_expr (rvar2), 5736 build_outer_var_ref (var, ctx)); 5737 gimplify_and_add (x, &llist[0]); 5738 } 5739 5740 /* For types that need construction, add another 5741 private var which will be default constructed 5742 and optionally initialized with 5743 OMP_CLAUSE_REDUCTION_GIMPLE_INIT, as in the 5744 loop we want to assign this value instead of 5745 constructing and destructing it in each 5746 iteration. */ 5747 tree nv = create_tmp_var_raw (TREE_TYPE (ivar)); 5748 gimple_add_tmp_var (nv); 5749 ctx->cb.decl_map->put (TREE_OPERAND (rvar2 5750 ? rvar2 5751 : ivar, 0), 5752 nv); 5753 x = lang_hooks.decls.omp_clause_default_ctor 5754 (c, nv, build_outer_var_ref (var, ctx)); 5755 gimplify_and_add (x, ilist); 5756 5757 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)) 5758 { 5759 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c); 5760 x = DECL_VALUE_EXPR (new_vard); 5761 tree vexpr = nv; 5762 if (new_vard != new_var) 5763 vexpr = build_fold_addr_expr (nv); 5764 SET_DECL_VALUE_EXPR (new_vard, vexpr); 5765 lower_omp (&tseq, ctx); 5766 SET_DECL_VALUE_EXPR (new_vard, x); 5767 gimple_seq_add_seq (ilist, tseq); 5768 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL; 5769 } 5770 5771 x = lang_hooks.decls.omp_clause_dtor (c, nv); 5772 if (x) 5773 gimplify_and_add (x, dlist); 5774 } 5775 5776 tree ref = build_outer_var_ref (var, ctx); 5777 x = unshare_expr (ivar); 5778 x = lang_hooks.decls.omp_clause_assign_op (c, x, 5779 ref); 5780 gimplify_and_add (x, &llist[0]); 5781 5782 ref = build_outer_var_ref (var, ctx); 5783 x = lang_hooks.decls.omp_clause_assign_op (c, ref, 5784 rvar); 5785 gimplify_and_add (x, &llist[3]); 5786 5787 DECL_HAS_VALUE_EXPR_P (placeholder) = 0; 5788 if (new_vard == new_var) 5789 SET_DECL_VALUE_EXPR (new_var, lvar); 5790 else 5791 SET_DECL_VALUE_EXPR (new_vard, 5792 build_fold_addr_expr (lvar)); 5793 5794 x = lang_hooks.decls.omp_clause_dtor (c, ivar); 5795 if (x) 5796 gimplify_and_add (x, &llist[1]); 5797 5798 tree ivar2 = unshare_expr (lvar); 5799 TREE_OPERAND (ivar2, 1) = sctx.idx; 5800 x = lang_hooks.decls.omp_clause_dtor (c, ivar2); 5801 if (x) 5802 gimplify_and_add (x, &llist[1]); 5803 5804 if (rvar2) 5805 { 5806 x = lang_hooks.decls.omp_clause_dtor (c, rvar2); 5807 if (x) 5808 gimplify_and_add (x, &llist[1]); 5809 } 5810 break; 5811 } 5812 if (x) 5813 gimplify_and_add (x, &llist[0]); 5814 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)) 5815 { 5816 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c); 5817 lower_omp (&tseq, ctx); 5818 gimple_seq_add_seq (&llist[0], tseq); 5819 } 5820 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL; 5821 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c); 5822 lower_omp (&tseq, ctx); 
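/* The just-lowered UDR merge sequence is queued in llist[1] below; llist[0] and llist[1] are replayed near the end of this function in a loop over all simd lanes, so each lane's private copy is combined back into the original list item.  */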
5823 gimple_seq_add_seq (&llist[1], tseq); 5824 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL; 5825 DECL_HAS_VALUE_EXPR_P (placeholder) = 0; 5826 if (new_vard == new_var) 5827 SET_DECL_VALUE_EXPR (new_var, lvar); 5828 else 5829 SET_DECL_VALUE_EXPR (new_vard, 5830 build_fold_addr_expr (lvar)); 5831 x = lang_hooks.decls.omp_clause_dtor (c, ivar); 5832 if (x) 5833 gimplify_and_add (x, &llist[1]); 5834 break; 5835 } 5836 /* If this is a reference to constant size reduction var 5837 with placeholder, we haven't emitted the initializer 5838 for it because it is undesirable if SIMD arrays are used. 5839 But if they aren't used, we need to emit the deferred 5840 initialization now. */ 5841 else if (omp_is_reference (var) && is_simd) 5842 handle_simd_reference (clause_loc, new_vard, ilist); 5843 5844 tree lab2 = NULL_TREE; 5845 if (cond) 5846 { 5847 gimple *g; 5848 if (!is_parallel_ctx (ctx)) 5849 { 5850 tree condv = create_tmp_var (boolean_type_node); 5851 tree m = build_simple_mem_ref (cond); 5852 g = gimple_build_assign (condv, m); 5853 gimple_seq_add_stmt (ilist, g); 5854 tree lab1 5855 = create_artificial_label (UNKNOWN_LOCATION); 5856 lab2 = create_artificial_label (UNKNOWN_LOCATION); 5857 g = gimple_build_cond (NE_EXPR, condv, 5858 boolean_false_node, 5859 lab2, lab1); 5860 gimple_seq_add_stmt (ilist, g); 5861 gimple_seq_add_stmt (ilist, 5862 gimple_build_label (lab1)); 5863 } 5864 g = gimple_build_assign (build_simple_mem_ref (cond), 5865 boolean_true_node); 5866 gimple_seq_add_stmt (ilist, g); 5867 } 5868 x = lang_hooks.decls.omp_clause_default_ctor 5869 (c, unshare_expr (new_var), 5870 cond ? NULL_TREE 5871 : build_outer_var_ref (var, ctx)); 5872 if (x) 5873 gimplify_and_add (x, ilist); 5874 5875 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION 5876 && OMP_CLAUSE_REDUCTION_INSCAN (c)) 5877 { 5878 if (ctx->for_simd_scan_phase) 5879 goto do_dtor; 5880 if (x || (!is_simd 5881 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))) 5882 { 5883 tree nv = create_tmp_var_raw (TREE_TYPE (new_var)); 5884 gimple_add_tmp_var (nv); 5885 ctx->cb.decl_map->put (new_vard, nv); 5886 x = lang_hooks.decls.omp_clause_default_ctor 5887 (c, nv, build_outer_var_ref (var, ctx)); 5888 if (x) 5889 gimplify_and_add (x, ilist); 5890 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)) 5891 { 5892 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c); 5893 tree vexpr = nv; 5894 if (new_vard != new_var) 5895 vexpr = build_fold_addr_expr (nv); 5896 SET_DECL_VALUE_EXPR (new_vard, vexpr); 5897 DECL_HAS_VALUE_EXPR_P (new_vard) = 1; 5898 lower_omp (&tseq, ctx); 5899 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE); 5900 DECL_HAS_VALUE_EXPR_P (new_vard) = 0; 5901 gimple_seq_add_seq (ilist, tseq); 5902 } 5903 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL; 5904 if (is_simd && ctx->scan_exclusive) 5905 { 5906 tree nv2 5907 = create_tmp_var_raw (TREE_TYPE (new_var)); 5908 gimple_add_tmp_var (nv2); 5909 ctx->cb.decl_map->put (nv, nv2); 5910 x = lang_hooks.decls.omp_clause_default_ctor 5911 (c, nv2, build_outer_var_ref (var, ctx)); 5912 gimplify_and_add (x, ilist); 5913 x = lang_hooks.decls.omp_clause_dtor (c, nv2); 5914 if (x) 5915 gimplify_and_add (x, dlist); 5916 } 5917 x = lang_hooks.decls.omp_clause_dtor (c, nv); 5918 if (x) 5919 gimplify_and_add (x, dlist); 5920 } 5921 else if (is_simd 5922 && ctx->scan_exclusive 5923 && TREE_ADDRESSABLE (TREE_TYPE (new_var))) 5924 { 5925 tree nv2 = create_tmp_var_raw (TREE_TYPE (new_var)); 5926 gimple_add_tmp_var (nv2); 5927 ctx->cb.decl_map->put (new_vard, nv2); 5928 x = lang_hooks.decls.omp_clause_dtor (c, nv2); 5929 if (x) 5930 
gimplify_and_add (x, dlist); 5931 } 5932 DECL_HAS_VALUE_EXPR_P (placeholder) = 0; 5933 goto do_dtor; 5934 } 5935 5936 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)) 5937 { 5938 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c); 5939 lower_omp (&tseq, ctx); 5940 gimple_seq_add_seq (ilist, tseq); 5941 } 5942 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL; 5943 if (is_simd) 5944 { 5945 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c); 5946 lower_omp (&tseq, ctx); 5947 gimple_seq_add_seq (dlist, tseq); 5948 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL; 5949 } 5950 DECL_HAS_VALUE_EXPR_P (placeholder) = 0; 5951 if (cond) 5952 { 5953 if (lab2) 5954 gimple_seq_add_stmt (ilist, gimple_build_label (lab2)); 5955 break; 5956 } 5957 goto do_dtor; 5958 } 5959 else 5960 { 5961 x = omp_reduction_init (c, TREE_TYPE (new_var)); 5962 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE); 5963 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c); 5964 5965 if (cond) 5966 { 5967 gimple *g; 5968 tree lab2 = NULL_TREE; 5969 /* GOMP_taskgroup_reduction_register memsets the whole 5970 array to zero. If the initializer is zero, we don't 5971 need to initialize it again, just mark it as ever 5972 used unconditionally, i.e. cond = true. */ 5973 if (initializer_zerop (x)) 5974 { 5975 g = gimple_build_assign (build_simple_mem_ref (cond), 5976 boolean_true_node); 5977 gimple_seq_add_stmt (ilist, g); 5978 break; 5979 } 5980 5981 /* Otherwise, emit 5982 if (!cond) { cond = true; new_var = x; } */ 5983 if (!is_parallel_ctx (ctx)) 5984 { 5985 tree condv = create_tmp_var (boolean_type_node); 5986 tree m = build_simple_mem_ref (cond); 5987 g = gimple_build_assign (condv, m); 5988 gimple_seq_add_stmt (ilist, g); 5989 tree lab1 5990 = create_artificial_label (UNKNOWN_LOCATION); 5991 lab2 = create_artificial_label (UNKNOWN_LOCATION); 5992 g = gimple_build_cond (NE_EXPR, condv, 5993 boolean_false_node, 5994 lab2, lab1); 5995 gimple_seq_add_stmt (ilist, g); 5996 gimple_seq_add_stmt (ilist, 5997 gimple_build_label (lab1)); 5998 } 5999 g = gimple_build_assign (build_simple_mem_ref (cond), 6000 boolean_true_node); 6001 gimple_seq_add_stmt (ilist, g); 6002 gimplify_assign (new_var, x, ilist); 6003 if (lab2) 6004 gimple_seq_add_stmt (ilist, gimple_build_label (lab2)); 6005 break; 6006 } 6007 6008 /* reduction(-:var) sums up the partial results, so it 6009 acts identically to reduction(+:var). 
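E.g., with two threads whose private copies start at the identity 0 and accumulate -a and -b respectively, the final value is orig + (-a) + (-b); the partial results are simply summed, hence the PLUS_EXPR below.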
*/ 6010 if (code == MINUS_EXPR) 6011 code = PLUS_EXPR; 6012 6013 bool is_truth_op 6014 = (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR); 6015 tree new_vard = new_var; 6016 if (is_simd && omp_is_reference (var)) 6017 { 6018 gcc_assert (TREE_CODE (new_var) == MEM_REF); 6019 new_vard = TREE_OPERAND (new_var, 0); 6020 gcc_assert (DECL_P (new_vard)); 6021 } 6022 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE; 6023 if (is_simd 6024 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION 6025 && OMP_CLAUSE_REDUCTION_INSCAN (c)) 6026 rvarp = &rvar; 6027 if (is_simd 6028 && lower_rec_simd_input_clauses (new_var, ctx, &sctx, 6029 ivar, lvar, rvarp, 6030 &rvar2)) 6031 { 6032 if (new_vard != new_var) 6033 { 6034 SET_DECL_VALUE_EXPR (new_vard, 6035 build_fold_addr_expr (lvar)); 6036 DECL_HAS_VALUE_EXPR_P (new_vard) = 1; 6037 } 6038 6039 tree ref = build_outer_var_ref (var, ctx); 6040 6041 if (rvarp) 6042 { 6043 if (ctx->for_simd_scan_phase) 6044 break; 6045 gimplify_assign (ivar, ref, &llist[0]); 6046 ref = build_outer_var_ref (var, ctx); 6047 gimplify_assign (ref, rvar, &llist[3]); 6048 break; 6049 } 6050 6051 gimplify_assign (unshare_expr (ivar), x, &llist[0]); 6052 6053 if (sctx.is_simt) 6054 { 6055 if (!simt_lane) 6056 simt_lane = create_tmp_var (unsigned_type_node); 6057 x = build_call_expr_internal_loc 6058 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY, 6059 TREE_TYPE (ivar), 2, ivar, simt_lane); 6060 x = build2 (code, TREE_TYPE (ivar), ivar, x); 6061 gimplify_assign (ivar, x, &llist[2]); 6062 } 6063 tree ivar2 = ivar; 6064 tree ref2 = ref; 6065 if (is_truth_op) 6066 { 6067 tree zero = build_zero_cst (TREE_TYPE (ivar)); 6068 ivar2 = fold_build2_loc (clause_loc, NE_EXPR, 6069 boolean_type_node, ivar, 6070 zero); 6071 ref2 = fold_build2_loc (clause_loc, NE_EXPR, 6072 boolean_type_node, ref, 6073 zero); 6074 } 6075 x = build2 (code, TREE_TYPE (ref), ref2, ivar2); 6076 if (is_truth_op) 6077 x = fold_convert (TREE_TYPE (ref), x); 6078 ref = build_outer_var_ref (var, ctx); 6079 gimplify_assign (ref, x, &llist[1]); 6080 6081 } 6082 else 6083 { 6084 if (omp_is_reference (var) && is_simd) 6085 handle_simd_reference (clause_loc, new_vard, ilist); 6086 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION 6087 && OMP_CLAUSE_REDUCTION_INSCAN (c)) 6088 break; 6089 gimplify_assign (new_var, x, ilist); 6090 if (is_simd) 6091 { 6092 tree ref = build_outer_var_ref (var, ctx); 6093 tree new_var2 = new_var; 6094 tree ref2 = ref; 6095 if (is_truth_op) 6096 { 6097 tree zero = build_zero_cst (TREE_TYPE (new_var)); 6098 new_var2 6099 = fold_build2_loc (clause_loc, NE_EXPR, 6100 boolean_type_node, new_var, 6101 zero); 6102 ref2 = fold_build2_loc (clause_loc, NE_EXPR, 6103 boolean_type_node, ref, 6104 zero); 6105 } 6106 x = build2 (code, TREE_TYPE (ref2), ref2, new_var2); 6107 if (is_truth_op) 6108 x = fold_convert (TREE_TYPE (new_var), x); 6109 ref = build_outer_var_ref (var, ctx); 6110 gimplify_assign (ref, x, dlist); 6111 } 6112 } 6113 } 6114 break; 6115 6116 default: 6117 gcc_unreachable (); 6118 } 6119 } 6120 } 6121 if (tskred_avar) 6122 { 6123 tree clobber = build_clobber (TREE_TYPE (tskred_avar)); 6124 gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber)); 6125 } 6126 6127 if (known_eq (sctx.max_vf, 1U)) 6128 { 6129 sctx.is_simt = false; 6130 if (ctx->lastprivate_conditional_map) 6131 { 6132 if (gimple_omp_for_combined_into_p (ctx->stmt)) 6133 { 6134 /* Signal to lower_omp_1 that it should use parent context. 
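Since max_vf is 1 there are no per-lane copies here; the conditional lastprivate tracking variables created for the enclosing construct are used instead, so the loop below redirects this context's lastprivate_conditional_map entries to the parent's.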
*/ 6135 ctx->combined_into_simd_safelen1 = true; 6136 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c)) 6137 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE 6138 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)) 6139 { 6140 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx); 6141 omp_context *outer = ctx->outer; 6142 if (gimple_code (outer->stmt) == GIMPLE_OMP_SCAN) 6143 outer = outer->outer; 6144 tree *v = ctx->lastprivate_conditional_map->get (o); 6145 tree po = lookup_decl (OMP_CLAUSE_DECL (c), outer); 6146 tree *pv = outer->lastprivate_conditional_map->get (po); 6147 *v = *pv; 6148 } 6149 } 6150 else 6151 { 6152 /* When not vectorized, treat lastprivate(conditional:) like 6153 normal lastprivate, as there will be just one simd lane 6154 writing the privatized variable. */ 6155 delete ctx->lastprivate_conditional_map; 6156 ctx->lastprivate_conditional_map = NULL; 6157 } 6158 } 6159 } 6160 6161 if (nonconst_simd_if) 6162 { 6163 if (sctx.lane == NULL_TREE) 6164 { 6165 sctx.idx = create_tmp_var (unsigned_type_node); 6166 sctx.lane = create_tmp_var (unsigned_type_node); 6167 } 6168 /* FIXME: For now. */ 6169 sctx.is_simt = false; 6170 } 6171 6172 if (sctx.lane || sctx.is_simt) 6173 { 6174 uid = create_tmp_var (ptr_type_node, "simduid"); 6175 /* Don't want uninit warnings on simduid, it is always uninitialized, 6176 but we use it not for the value, but for the DECL_UID only. */ 6177 TREE_NO_WARNING (uid) = 1; 6178 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_); 6179 OMP_CLAUSE__SIMDUID__DECL (c) = uid; 6180 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt); 6181 gimple_omp_for_set_clauses (ctx->stmt, c); 6182 } 6183 /* Emit calls denoting privatized variables and initializing a pointer to 6184 structure that holds private variables as fields after ompdevlow pass. */ 6185 if (sctx.is_simt) 6186 { 6187 sctx.simt_eargs[0] = uid; 6188 gimple *g 6189 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs); 6190 gimple_call_set_lhs (g, uid); 6191 gimple_seq_add_stmt (ilist, g); 6192 sctx.simt_eargs.release (); 6193 6194 simtrec = create_tmp_var (ptr_type_node, ".omp_simt"); 6195 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid); 6196 gimple_call_set_lhs (g, simtrec); 6197 gimple_seq_add_stmt (ilist, g); 6198 } 6199 if (sctx.lane) 6200 { 6201 gimple *g = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 6202 2 + (nonconst_simd_if != NULL), 6203 uid, integer_zero_node, 6204 nonconst_simd_if); 6205 gimple_call_set_lhs (g, sctx.lane); 6206 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt)); 6207 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT); 6208 g = gimple_build_assign (sctx.lane, INTEGER_CST, 6209 build_int_cst (unsigned_type_node, 0)); 6210 gimple_seq_add_stmt (ilist, g); 6211 if (sctx.lastlane) 6212 { 6213 g = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE, 6214 2, uid, sctx.lane); 6215 gimple_call_set_lhs (g, sctx.lastlane); 6216 gimple_seq_add_stmt (dlist, g); 6217 gimple_seq_add_seq (dlist, llist[3]); 6218 } 6219 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. 
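Roughly:
     simt_lane = 1;
     while (simt_lane < simt_vf)
       {
         ivar = ivar OP GOMP_SIMT_XCHG_BFLY (ivar, simt_lane);   (llist[2])
         simt_lane <<= 1;
       }
each step combining a lane's value with the value held simt_lane lanes away (a butterfly exchange), so log2(simt_vf) steps reduce all lanes.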
*/ 6220 if (llist[2]) 6221 { 6222 tree simt_vf = create_tmp_var (unsigned_type_node); 6223 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0); 6224 gimple_call_set_lhs (g, simt_vf); 6225 gimple_seq_add_stmt (dlist, g); 6226 6227 tree t = build_int_cst (unsigned_type_node, 1); 6228 g = gimple_build_assign (simt_lane, INTEGER_CST, t); 6229 gimple_seq_add_stmt (dlist, g); 6230 6231 t = build_int_cst (unsigned_type_node, 0); 6232 g = gimple_build_assign (sctx.idx, INTEGER_CST, t); 6233 gimple_seq_add_stmt (dlist, g); 6234 6235 tree body = create_artificial_label (UNKNOWN_LOCATION); 6236 tree header = create_artificial_label (UNKNOWN_LOCATION); 6237 tree end = create_artificial_label (UNKNOWN_LOCATION); 6238 gimple_seq_add_stmt (dlist, gimple_build_goto (header)); 6239 gimple_seq_add_stmt (dlist, gimple_build_label (body)); 6240 6241 gimple_seq_add_seq (dlist, llist[2]); 6242 6243 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node); 6244 gimple_seq_add_stmt (dlist, g); 6245 6246 gimple_seq_add_stmt (dlist, gimple_build_label (header)); 6247 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end); 6248 gimple_seq_add_stmt (dlist, g); 6249 6250 gimple_seq_add_stmt (dlist, gimple_build_label (end)); 6251 } 6252 for (int i = 0; i < 2; i++) 6253 if (llist[i]) 6254 { 6255 tree vf = create_tmp_var (unsigned_type_node); 6256 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid); 6257 gimple_call_set_lhs (g, vf); 6258 gimple_seq *seq = i == 0 ? ilist : dlist; 6259 gimple_seq_add_stmt (seq, g); 6260 tree t = build_int_cst (unsigned_type_node, 0); 6261 g = gimple_build_assign (sctx.idx, INTEGER_CST, t); 6262 gimple_seq_add_stmt (seq, g); 6263 tree body = create_artificial_label (UNKNOWN_LOCATION); 6264 tree header = create_artificial_label (UNKNOWN_LOCATION); 6265 tree end = create_artificial_label (UNKNOWN_LOCATION); 6266 gimple_seq_add_stmt (seq, gimple_build_goto (header)); 6267 gimple_seq_add_stmt (seq, gimple_build_label (body)); 6268 gimple_seq_add_seq (seq, llist[i]); 6269 t = build_int_cst (unsigned_type_node, 1); 6270 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t); 6271 gimple_seq_add_stmt (seq, g); 6272 gimple_seq_add_stmt (seq, gimple_build_label (header)); 6273 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end); 6274 gimple_seq_add_stmt (seq, g); 6275 gimple_seq_add_stmt (seq, gimple_build_label (end)); 6276 } 6277 } 6278 if (sctx.is_simt) 6279 { 6280 gimple_seq_add_seq (dlist, sctx.simt_dlist); 6281 gimple *g 6282 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec); 6283 gimple_seq_add_stmt (dlist, g); 6284 } 6285 6286 /* The copyin sequence is not to be executed by the main thread, since 6287 that would result in self-copies. Perhaps not visible to scalars, 6288 but it certainly is to C++ operator=. */ 6289 if (copyin_seq) 6290 { 6291 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM), 6292 0); 6293 x = build2 (NE_EXPR, boolean_type_node, x, 6294 build_int_cst (TREE_TYPE (x), 0)); 6295 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL); 6296 gimplify_and_add (x, ilist); 6297 } 6298 6299 /* If any copyin variable is passed by reference, we must ensure the 6300 master thread doesn't modify it before it is copied over in all 6301 threads. Similarly for variables in both firstprivate and 6302 lastprivate clauses we need to ensure the lastprivate copying 6303 happens after firstprivate copying in all threads. And similarly 6304 for UDRs if initializer expression refers to omp_orig. 
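E.g., given
     #pragma omp for firstprivate(x) lastprivate(x)
a thread may not execute the lastprivate store back into X until every thread has read X for its firstprivate copy; the barrier emitted below enforces that ordering.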
*/ 6305 if (copyin_by_ref || lastprivate_firstprivate 6306 || (reduction_omp_orig_ref 6307 && !ctx->scan_inclusive 6308 && !ctx->scan_exclusive)) 6309 { 6310 /* Don't add any barrier for #pragma omp simd or 6311 #pragma omp distribute. */ 6312 if (!is_task_ctx (ctx) 6313 && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR 6314 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR)) 6315 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE)); 6316 } 6317 6318 /* If max_vf is non-zero, then we can use only a vectorization factor 6319 up to the max_vf we chose. So stick it into the safelen clause. */ 6320 if (maybe_ne (sctx.max_vf, 0U)) 6321 { 6322 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt), 6323 OMP_CLAUSE_SAFELEN); 6324 poly_uint64 safe_len; 6325 if (c == NULL_TREE 6326 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len) 6327 && maybe_gt (safe_len, sctx.max_vf))) 6328 { 6329 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN); 6330 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node, 6331 sctx.max_vf); 6332 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt); 6333 gimple_omp_for_set_clauses (ctx->stmt, c); 6334 } 6335 } 6336 } 6337 6338 /* Create temporary variables for lastprivate(conditional:) implementation 6339 in context CTX with CLAUSES. */ 6340 6341 static void 6342 lower_lastprivate_conditional_clauses (tree *clauses, omp_context *ctx) 6343 { 6344 tree iter_type = NULL_TREE; 6345 tree cond_ptr = NULL_TREE; 6346 tree iter_var = NULL_TREE; 6347 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR 6348 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD); 6349 tree next = *clauses; 6350 for (tree c = *clauses; c; c = OMP_CLAUSE_CHAIN (c)) 6351 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE 6352 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)) 6353 { 6354 if (is_simd) 6355 { 6356 tree cc = omp_find_clause (next, OMP_CLAUSE__CONDTEMP_); 6357 gcc_assert (cc); 6358 if (iter_type == NULL_TREE) 6359 { 6360 iter_type = TREE_TYPE (OMP_CLAUSE_DECL (cc)); 6361 iter_var = create_tmp_var_raw (iter_type); 6362 DECL_CONTEXT (iter_var) = current_function_decl; 6363 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1; 6364 DECL_CHAIN (iter_var) = ctx->block_vars; 6365 ctx->block_vars = iter_var; 6366 tree c3 6367 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_); 6368 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1; 6369 OMP_CLAUSE_DECL (c3) = iter_var; 6370 OMP_CLAUSE_CHAIN (c3) = *clauses; 6371 *clauses = c3; 6372 ctx->lastprivate_conditional_map = new hash_map<tree, tree>; 6373 } 6374 next = OMP_CLAUSE_CHAIN (cc); 6375 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx); 6376 tree v = lookup_decl (OMP_CLAUSE_DECL (cc), ctx); 6377 ctx->lastprivate_conditional_map->put (o, v); 6378 continue; 6379 } 6380 if (iter_type == NULL) 6381 { 6382 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR) 6383 { 6384 struct omp_for_data fd; 6385 omp_extract_for_data (as_a <gomp_for *> (ctx->stmt), &fd, 6386 NULL); 6387 iter_type = unsigned_type_for (fd.iter_type); 6388 } 6389 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS) 6390 iter_type = unsigned_type_node; 6391 tree c2 = omp_find_clause (*clauses, OMP_CLAUSE__CONDTEMP_); 6392 if (c2) 6393 { 6394 cond_ptr 6395 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2), ctx); 6396 OMP_CLAUSE_DECL (c2) = cond_ptr; 6397 } 6398 else 6399 { 6400 cond_ptr = create_tmp_var_raw (build_pointer_type (iter_type)); 6401 DECL_CONTEXT (cond_ptr) = current_function_decl; 6402 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr) = 1; 6403 DECL_CHAIN 
(cond_ptr) = ctx->block_vars; 6404 ctx->block_vars = cond_ptr; 6405 c2 = build_omp_clause (UNKNOWN_LOCATION, 6406 OMP_CLAUSE__CONDTEMP_); 6407 OMP_CLAUSE_DECL (c2) = cond_ptr; 6408 OMP_CLAUSE_CHAIN (c2) = *clauses; 6409 *clauses = c2; 6410 } 6411 iter_var = create_tmp_var_raw (iter_type); 6412 DECL_CONTEXT (iter_var) = current_function_decl; 6413 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1; 6414 DECL_CHAIN (iter_var) = ctx->block_vars; 6415 ctx->block_vars = iter_var; 6416 tree c3 6417 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_); 6418 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1; 6419 OMP_CLAUSE_DECL (c3) = iter_var; 6420 OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2); 6421 OMP_CLAUSE_CHAIN (c2) = c3; 6422 ctx->lastprivate_conditional_map = new hash_map<tree, tree>; 6423 } 6424 tree v = create_tmp_var_raw (iter_type); 6425 DECL_CONTEXT (v) = current_function_decl; 6426 DECL_SEEN_IN_BIND_EXPR_P (v) = 1; 6427 DECL_CHAIN (v) = ctx->block_vars; 6428 ctx->block_vars = v; 6429 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx); 6430 ctx->lastprivate_conditional_map->put (o, v); 6431 } 6432 } 6433 6434 6435 /* Generate code to implement the LASTPRIVATE clauses. This is used for 6436 both parallel and workshare constructs. PREDICATE may be NULL if it's 6437 always true. BODY_P is the sequence to insert early initialization 6438 if needed, STMT_LIST is where the non-conditional lastprivate handling 6439 goes into and CSTMT_LIST is a sequence that needs to be run in a critical 6440 section. */ 6441 6442 static void 6443 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *body_p, 6444 gimple_seq *stmt_list, gimple_seq *cstmt_list, 6445 omp_context *ctx) 6446 { 6447 tree x, c, label = NULL, orig_clauses = clauses; 6448 bool par_clauses = false; 6449 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL; 6450 unsigned HOST_WIDE_INT conditional_off = 0; 6451 gimple_seq post_stmt_list = NULL; 6452 6453 /* Early exit if there are no lastprivate or linear clauses. */ 6454 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses)) 6455 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE 6456 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR 6457 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses))) 6458 break; 6459 if (clauses == NULL) 6460 { 6461 /* If this was a workshare clause, see if it had been combined 6462 with its parallel. In that case, look for the clauses on the 6463 parallel statement itself. 
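E.g., for a combined
     #pragma omp parallel for lastprivate(x)
the clause may survive only on the enclosing GIMPLE_OMP_PARALLEL once the combined construct has been split, so ctx->outer is consulted below.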
*/ 6464 if (is_parallel_ctx (ctx)) 6465 return; 6466 6467 ctx = ctx->outer; 6468 if (ctx == NULL || !is_parallel_ctx (ctx)) 6469 return; 6470 6471 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt), 6472 OMP_CLAUSE_LASTPRIVATE); 6473 if (clauses == NULL) 6474 return; 6475 par_clauses = true; 6476 } 6477 6478 bool maybe_simt = false; 6479 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR 6480 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD) 6481 { 6482 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_); 6483 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_); 6484 if (simduid) 6485 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid); 6486 } 6487 6488 if (predicate) 6489 { 6490 gcond *stmt; 6491 tree label_true, arm1, arm2; 6492 enum tree_code pred_code = TREE_CODE (predicate); 6493 6494 label = create_artificial_label (UNKNOWN_LOCATION); 6495 label_true = create_artificial_label (UNKNOWN_LOCATION); 6496 if (TREE_CODE_CLASS (pred_code) == tcc_comparison) 6497 { 6498 arm1 = TREE_OPERAND (predicate, 0); 6499 arm2 = TREE_OPERAND (predicate, 1); 6500 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue); 6501 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue); 6502 } 6503 else 6504 { 6505 arm1 = predicate; 6506 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue); 6507 arm2 = boolean_false_node; 6508 pred_code = NE_EXPR; 6509 } 6510 if (maybe_simt) 6511 { 6512 c = build2 (pred_code, boolean_type_node, arm1, arm2); 6513 c = fold_convert (integer_type_node, c); 6514 simtcond = create_tmp_var (integer_type_node); 6515 gimplify_assign (simtcond, c, stmt_list); 6516 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 6517 1, simtcond); 6518 c = create_tmp_var (integer_type_node); 6519 gimple_call_set_lhs (g, c); 6520 gimple_seq_add_stmt (stmt_list, g); 6521 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node, 6522 label_true, label); 6523 } 6524 else 6525 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label); 6526 gimple_seq_add_stmt (stmt_list, stmt); 6527 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true)); 6528 } 6529 6530 tree cond_ptr = NULL_TREE; 6531 for (c = clauses; c ;) 6532 { 6533 tree var, new_var; 6534 location_t clause_loc = OMP_CLAUSE_LOCATION (c); 6535 gimple_seq *this_stmt_list = stmt_list; 6536 tree lab2 = NULL_TREE; 6537 6538 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE 6539 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) 6540 && ctx->lastprivate_conditional_map 6541 && !ctx->combined_into_simd_safelen1) 6542 { 6543 gcc_assert (body_p); 6544 if (simduid) 6545 goto next; 6546 if (cond_ptr == NULL_TREE) 6547 { 6548 cond_ptr = omp_find_clause (orig_clauses, OMP_CLAUSE__CONDTEMP_); 6549 cond_ptr = OMP_CLAUSE_DECL (cond_ptr); 6550 } 6551 tree type = TREE_TYPE (TREE_TYPE (cond_ptr)); 6552 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx); 6553 tree v = *ctx->lastprivate_conditional_map->get (o); 6554 gimplify_assign (v, build_zero_cst (type), body_p); 6555 this_stmt_list = cstmt_list; 6556 tree mem; 6557 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr))) 6558 { 6559 mem = build2 (MEM_REF, type, cond_ptr, 6560 build_int_cst (TREE_TYPE (cond_ptr), 6561 conditional_off)); 6562 conditional_off += tree_to_uhwi (TYPE_SIZE_UNIT (type)); 6563 } 6564 else 6565 mem = build4 (ARRAY_REF, type, cond_ptr, 6566 size_int (conditional_off++), NULL_TREE, NULL_TREE); 6567 tree mem2 = copy_node (mem); 6568 gimple_seq seq = NULL; 6569 mem = force_gimple_operand (mem, &seq, true, NULL_TREE); 6570 
gimple_seq_add_seq (this_stmt_list, seq); 6571 tree lab1 = create_artificial_label (UNKNOWN_LOCATION); 6572 lab2 = create_artificial_label (UNKNOWN_LOCATION); 6573 gimple *g = gimple_build_cond (GT_EXPR, v, mem, lab1, lab2); 6574 gimple_seq_add_stmt (this_stmt_list, g); 6575 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab1)); 6576 gimplify_assign (mem2, v, this_stmt_list); 6577 } 6578 else if (predicate 6579 && ctx->combined_into_simd_safelen1 6580 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE 6581 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) 6582 && ctx->lastprivate_conditional_map) 6583 this_stmt_list = &post_stmt_list; 6584 6585 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE 6586 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR 6587 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))) 6588 { 6589 var = OMP_CLAUSE_DECL (c); 6590 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE 6591 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) 6592 && is_taskloop_ctx (ctx)) 6593 { 6594 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer)); 6595 new_var = lookup_decl (var, ctx->outer); 6596 } 6597 else 6598 { 6599 new_var = lookup_decl (var, ctx); 6600 /* Avoid uninitialized warnings for lastprivate and 6601 for linear iterators. */ 6602 if (predicate 6603 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE 6604 || OMP_CLAUSE_LINEAR_NO_COPYIN (c))) 6605 TREE_NO_WARNING (new_var) = 1; 6606 } 6607 6608 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var)) 6609 { 6610 tree val = DECL_VALUE_EXPR (new_var); 6611 if (TREE_CODE (val) == ARRAY_REF 6612 && VAR_P (TREE_OPERAND (val, 0)) 6613 && lookup_attribute ("omp simd array", 6614 DECL_ATTRIBUTES (TREE_OPERAND (val, 6615 0)))) 6616 { 6617 if (lastlane == NULL) 6618 { 6619 lastlane = create_tmp_var (unsigned_type_node); 6620 gcall *g 6621 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE, 6622 2, simduid, 6623 TREE_OPERAND (val, 1)); 6624 gimple_call_set_lhs (g, lastlane); 6625 gimple_seq_add_stmt (this_stmt_list, g); 6626 } 6627 new_var = build4 (ARRAY_REF, TREE_TYPE (val), 6628 TREE_OPERAND (val, 0), lastlane, 6629 NULL_TREE, NULL_TREE); 6630 TREE_THIS_NOTRAP (new_var) = 1; 6631 } 6632 } 6633 else if (maybe_simt) 6634 { 6635 tree val = (DECL_HAS_VALUE_EXPR_P (new_var) 6636 ? 
DECL_VALUE_EXPR (new_var) 6637 : new_var); 6638 if (simtlast == NULL) 6639 { 6640 simtlast = create_tmp_var (unsigned_type_node); 6641 gcall *g = gimple_build_call_internal 6642 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond); 6643 gimple_call_set_lhs (g, simtlast); 6644 gimple_seq_add_stmt (this_stmt_list, g); 6645 } 6646 x = build_call_expr_internal_loc 6647 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX, 6648 TREE_TYPE (val), 2, val, simtlast); 6649 new_var = unshare_expr (new_var); 6650 gimplify_assign (new_var, x, this_stmt_list); 6651 new_var = unshare_expr (new_var); 6652 } 6653 6654 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE 6655 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)) 6656 { 6657 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx); 6658 gimple_seq_add_seq (this_stmt_list, 6659 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)); 6660 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL; 6661 } 6662 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR 6663 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)) 6664 { 6665 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx); 6666 gimple_seq_add_seq (this_stmt_list, 6667 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)); 6668 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL; 6669 } 6670 6671 x = NULL_TREE; 6672 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE 6673 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c) 6674 && is_taskloop_ctx (ctx)) 6675 { 6676 tree ovar = maybe_lookup_decl_in_outer_ctx (var, 6677 ctx->outer->outer); 6678 if (is_global_var (ovar)) 6679 x = ovar; 6680 } 6681 if (!x) 6682 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE); 6683 if (omp_is_reference (var)) 6684 new_var = build_simple_mem_ref_loc (clause_loc, new_var); 6685 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var); 6686 gimplify_and_add (x, this_stmt_list); 6687 6688 if (lab2) 6689 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab2)); 6690 } 6691 6692 next: 6693 c = OMP_CLAUSE_CHAIN (c); 6694 if (c == NULL && !par_clauses) 6695 { 6696 /* If this was a workshare clause, see if it had been combined 6697 with its parallel. In that case, continue looking for the 6698 clauses also on the parallel statement itself. */ 6699 if (is_parallel_ctx (ctx)) 6700 break; 6701 6702 ctx = ctx->outer; 6703 if (ctx == NULL || !is_parallel_ctx (ctx)) 6704 break; 6705 6706 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt), 6707 OMP_CLAUSE_LASTPRIVATE); 6708 par_clauses = true; 6709 } 6710 } 6711 6712 if (label) 6713 gimple_seq_add_stmt (stmt_list, gimple_build_label (label)); 6714 gimple_seq_add_seq (stmt_list, post_stmt_list); 6715 } 6716 6717 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL 6718 (which might be a placeholder). INNER is true if this is an inner 6719 axis of a multi-axis loop. FORK and JOIN are (optional) fork and 6720 join markers. Generate the before-loop forking sequence in 6721 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The 6722 general form of these sequences is 6723 6724 GOACC_REDUCTION_SETUP 6725 GOACC_FORK 6726 GOACC_REDUCTION_INIT 6727 ... 6728 GOACC_REDUCTION_FINI 6729 GOACC_JOIN 6730 GOACC_REDUCTION_TEARDOWN. 
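Each marker is an IFN_GOACC_REDUCTION call taking (stage code, ref_to_res, var, gwv level, operator, buffer offset); below, its result is assigned to V1 (setup), V2 (init), V3 (fini) or the outgoing variable (teardown), threading the reduced value through the whole sequence.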
*/ 6731 6732 static void 6733 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner, 6734 gcall *fork, gcall *join, gimple_seq *fork_seq, 6735 gimple_seq *join_seq, omp_context *ctx) 6736 { 6737 gimple_seq before_fork = NULL; 6738 gimple_seq after_fork = NULL; 6739 gimple_seq before_join = NULL; 6740 gimple_seq after_join = NULL; 6741 tree init_code = NULL_TREE, fini_code = NULL_TREE, 6742 setup_code = NULL_TREE, teardown_code = NULL_TREE; 6743 unsigned offset = 0; 6744 6745 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c)) 6746 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION) 6747 { 6748 /* No 'reduction' clauses on OpenACC 'kernels'. */ 6749 gcc_checking_assert (!is_oacc_kernels (ctx)); 6750 6751 tree orig = OMP_CLAUSE_DECL (c); 6752 tree var = maybe_lookup_decl (orig, ctx); 6753 tree ref_to_res = NULL_TREE; 6754 tree incoming, outgoing, v1, v2, v3; 6755 bool is_private = false; 6756 6757 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c); 6758 if (rcode == MINUS_EXPR) 6759 rcode = PLUS_EXPR; 6760 else if (rcode == TRUTH_ANDIF_EXPR) 6761 rcode = BIT_AND_EXPR; 6762 else if (rcode == TRUTH_ORIF_EXPR) 6763 rcode = BIT_IOR_EXPR; 6764 tree op = build_int_cst (unsigned_type_node, rcode); 6765 6766 if (!var) 6767 var = orig; 6768 6769 incoming = outgoing = var; 6770 6771 if (!inner) 6772 { 6773 /* See if an outer construct also reduces this variable. */ 6774 omp_context *outer = ctx; 6775 6776 while (omp_context *probe = outer->outer) 6777 { 6778 enum gimple_code type = gimple_code (probe->stmt); 6779 tree cls; 6780 6781 switch (type) 6782 { 6783 case GIMPLE_OMP_FOR: 6784 cls = gimple_omp_for_clauses (probe->stmt); 6785 break; 6786 6787 case GIMPLE_OMP_TARGET: 6788 /* No 'reduction' clauses inside OpenACC 'kernels' 6789 regions. */ 6790 gcc_checking_assert (!is_oacc_kernels (probe)); 6791 6792 if (!is_gimple_omp_offloaded (probe->stmt)) 6793 goto do_lookup; 6794 6795 cls = gimple_omp_target_clauses (probe->stmt); 6796 break; 6797 6798 default: 6799 goto do_lookup; 6800 } 6801 6802 outer = probe; 6803 for (; cls; cls = OMP_CLAUSE_CHAIN (cls)) 6804 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION 6805 && orig == OMP_CLAUSE_DECL (cls)) 6806 { 6807 incoming = outgoing = lookup_decl (orig, probe); 6808 goto has_outer_reduction; 6809 } 6810 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE 6811 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE) 6812 && orig == OMP_CLAUSE_DECL (cls)) 6813 { 6814 is_private = true; 6815 goto do_lookup; 6816 } 6817 } 6818 6819 do_lookup: 6820 /* This is the outermost construct with this reduction, 6821 see if there's a mapping for it. */ 6822 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET 6823 && maybe_lookup_field (orig, outer) && !is_private) 6824 { 6825 ref_to_res = build_receiver_ref (orig, false, outer); 6826 if (omp_is_reference (orig)) 6827 ref_to_res = build_simple_mem_ref (ref_to_res); 6828 6829 tree type = TREE_TYPE (var); 6830 if (POINTER_TYPE_P (type)) 6831 type = TREE_TYPE (type); 6832 6833 outgoing = var; 6834 incoming = omp_reduction_init_op (loc, rcode, type); 6835 } 6836 else 6837 { 6838 /* Try to look at enclosing contexts for reduction var, 6839 use original if no mapping found. */ 6840 tree t = NULL_TREE; 6841 omp_context *c = ctx->outer; 6842 while (c && !t) 6843 { 6844 t = maybe_lookup_decl (orig, c); 6845 c = c->outer; 6846 } 6847 incoming = outgoing = (t ? 
t : orig); 6848 } 6849 6850 has_outer_reduction:; 6851 } 6852 6853 if (!ref_to_res) 6854 ref_to_res = integer_zero_node; 6855 6856 if (omp_is_reference (orig)) 6857 { 6858 tree type = TREE_TYPE (var); 6859 const char *id = IDENTIFIER_POINTER (DECL_NAME (var)); 6860 6861 if (!inner) 6862 { 6863 tree x = create_tmp_var (TREE_TYPE (type), id); 6864 gimplify_assign (var, build_fold_addr_expr (x), fork_seq); 6865 } 6866 6867 v1 = create_tmp_var (type, id); 6868 v2 = create_tmp_var (type, id); 6869 v3 = create_tmp_var (type, id); 6870 6871 gimplify_assign (v1, var, fork_seq); 6872 gimplify_assign (v2, var, fork_seq); 6873 gimplify_assign (v3, var, fork_seq); 6874 6875 var = build_simple_mem_ref (var); 6876 v1 = build_simple_mem_ref (v1); 6877 v2 = build_simple_mem_ref (v2); 6878 v3 = build_simple_mem_ref (v3); 6879 outgoing = build_simple_mem_ref (outgoing); 6880 6881 if (!TREE_CONSTANT (incoming)) 6882 incoming = build_simple_mem_ref (incoming); 6883 } 6884 else 6885 v1 = v2 = v3 = var; 6886 6887 /* Determine position in reduction buffer, which may be used 6888 by target. The parser has ensured that this is not a 6889 variable-sized type. */ 6890 fixed_size_mode mode 6891 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var))); 6892 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT; 6893 offset = (offset + align - 1) & ~(align - 1); 6894 tree off = build_int_cst (sizetype, offset); 6895 offset += GET_MODE_SIZE (mode); 6896 6897 if (!init_code) 6898 { 6899 init_code = build_int_cst (integer_type_node, 6900 IFN_GOACC_REDUCTION_INIT); 6901 fini_code = build_int_cst (integer_type_node, 6902 IFN_GOACC_REDUCTION_FINI); 6903 setup_code = build_int_cst (integer_type_node, 6904 IFN_GOACC_REDUCTION_SETUP); 6905 teardown_code = build_int_cst (integer_type_node, 6906 IFN_GOACC_REDUCTION_TEARDOWN); 6907 } 6908 6909 tree setup_call 6910 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION, 6911 TREE_TYPE (var), 6, setup_code, 6912 unshare_expr (ref_to_res), 6913 incoming, level, op, off); 6914 tree init_call 6915 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION, 6916 TREE_TYPE (var), 6, init_code, 6917 unshare_expr (ref_to_res), 6918 v1, level, op, off); 6919 tree fini_call 6920 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION, 6921 TREE_TYPE (var), 6, fini_code, 6922 unshare_expr (ref_to_res), 6923 v2, level, op, off); 6924 tree teardown_call 6925 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION, 6926 TREE_TYPE (var), 6, teardown_code, 6927 ref_to_res, v3, level, op, off); 6928 6929 gimplify_assign (v1, setup_call, &before_fork); 6930 gimplify_assign (v2, init_call, &after_fork); 6931 gimplify_assign (v3, fini_call, &before_join); 6932 gimplify_assign (outgoing, teardown_call, &after_join); 6933 } 6934 6935 /* Now stitch things together. */ 6936 gimple_seq_add_seq (fork_seq, before_fork); 6937 if (fork) 6938 gimple_seq_add_stmt (fork_seq, fork); 6939 gimple_seq_add_seq (fork_seq, after_fork); 6940 6941 gimple_seq_add_seq (join_seq, before_join); 6942 if (join) 6943 gimple_seq_add_stmt (join_seq, join); 6944 gimple_seq_add_seq (join_seq, after_join); 6945 } 6946 6947 /* Generate code to implement the REDUCTION clauses, append it 6948 to STMT_SEQP. CLIST if non-NULL is a pointer to a sequence 6949 that should be emitted also inside of the critical section, 6950 in that case clear *CLIST afterwards, otherwise leave it as is 6951 and let the caller emit it itself. 
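The emitted shape is roughly
     GOMP_atomic_start ();
     <combine each private copy into the outer variable>
     <*CLIST, if present>
     GOMP_atomic_end ();
except that a single scalar non-UDR reduction clause is instead implemented with one relaxed OMP_ATOMIC update and no lock.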
*/ 6952 6953 static void 6954 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, 6955 gimple_seq *clist, omp_context *ctx) 6956 { 6957 gimple_seq sub_seq = NULL; 6958 gimple *stmt; 6959 tree x, c; 6960 int count = 0; 6961 6962 /* OpenACC loop reductions are handled elsewhere. */ 6963 if (is_gimple_omp_oacc (ctx->stmt)) 6964 return; 6965 6966 /* SIMD reductions are handled in lower_rec_input_clauses. */ 6967 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR 6968 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD) 6969 return; 6970 6971 /* inscan reductions are handled elsewhere. */ 6972 if (ctx->scan_inclusive || ctx->scan_exclusive) 6973 return; 6974 6975 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC 6976 update in that case, otherwise use a lock. */ 6977 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c)) 6978 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION 6979 && !OMP_CLAUSE_REDUCTION_TASK (c)) 6980 { 6981 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) 6982 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF) 6983 { 6984 /* Never use OMP_ATOMIC for array reductions or UDRs. */ 6985 count = -1; 6986 break; 6987 } 6988 count++; 6989 } 6990 6991 if (count == 0) 6992 return; 6993 6994 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c)) 6995 { 6996 tree var, ref, new_var, orig_var; 6997 enum tree_code code; 6998 location_t clause_loc = OMP_CLAUSE_LOCATION (c); 6999 7000 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION 7001 || OMP_CLAUSE_REDUCTION_TASK (c)) 7002 continue; 7003 7004 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION; 7005 orig_var = var = OMP_CLAUSE_DECL (c); 7006 if (TREE_CODE (var) == MEM_REF) 7007 { 7008 var = TREE_OPERAND (var, 0); 7009 if (TREE_CODE (var) == POINTER_PLUS_EXPR) 7010 var = TREE_OPERAND (var, 0); 7011 if (TREE_CODE (var) == ADDR_EXPR) 7012 var = TREE_OPERAND (var, 0); 7013 else 7014 { 7015 /* If this is a pointer- or reference-based array 7016 section, the var could be private in the outer 7017 context e.g. on an orphaned loop construct. Pretend this 7018 is the private variable's outer reference. */ 7019 ccode = OMP_CLAUSE_PRIVATE; 7020 if (TREE_CODE (var) == INDIRECT_REF) 7021 var = TREE_OPERAND (var, 0); 7022 } 7023 orig_var = var; 7024 if (is_variable_sized (var)) 7025 { 7026 gcc_assert (DECL_HAS_VALUE_EXPR_P (var)); 7027 var = DECL_VALUE_EXPR (var); 7028 gcc_assert (TREE_CODE (var) == INDIRECT_REF); 7029 var = TREE_OPERAND (var, 0); 7030 gcc_assert (DECL_P (var)); 7031 } 7032 } 7033 new_var = lookup_decl (var, ctx); 7034 if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var)) 7035 new_var = build_simple_mem_ref_loc (clause_loc, new_var); 7036 ref = build_outer_var_ref (var, ctx, ccode); 7037 code = OMP_CLAUSE_REDUCTION_CODE (c); 7038 7039 /* reduction(-:var) sums up the partial results, so it acts 7040 identically to reduction(+:var).
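The same canonicalization is done in lower_rec_input_clauses above.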
*/ 7041 if (code == MINUS_EXPR) 7042 code = PLUS_EXPR; 7043 7044 bool is_truth_op = (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR); 7045 if (count == 1) 7046 { 7047 tree addr = build_fold_addr_expr_loc (clause_loc, ref); 7048 7049 addr = save_expr (addr); 7050 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr); 7051 tree new_var2 = new_var; 7052 tree ref2 = ref; 7053 if (is_truth_op) 7054 { 7055 tree zero = build_zero_cst (TREE_TYPE (new_var)); 7056 new_var2 = fold_build2_loc (clause_loc, NE_EXPR, 7057 boolean_type_node, new_var, zero); 7058 ref2 = fold_build2_loc (clause_loc, NE_EXPR, boolean_type_node, 7059 ref, zero); 7060 } 7061 x = fold_build2_loc (clause_loc, code, TREE_TYPE (new_var2), ref2, 7062 new_var2); 7063 if (is_truth_op) 7064 x = fold_convert (TREE_TYPE (new_var), x); 7065 x = build2 (OMP_ATOMIC, void_type_node, addr, x); 7066 OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED; 7067 gimplify_and_add (x, stmt_seqp); 7068 return; 7069 } 7070 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF) 7071 { 7072 tree d = OMP_CLAUSE_DECL (c); 7073 tree type = TREE_TYPE (d); 7074 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type)); 7075 tree i = create_tmp_var (TREE_TYPE (v)); 7076 tree ptype = build_pointer_type (TREE_TYPE (type)); 7077 tree bias = TREE_OPERAND (d, 1); 7078 d = TREE_OPERAND (d, 0); 7079 if (TREE_CODE (d) == POINTER_PLUS_EXPR) 7080 { 7081 tree b = TREE_OPERAND (d, 1); 7082 b = maybe_lookup_decl (b, ctx); 7083 if (b == NULL) 7084 { 7085 b = TREE_OPERAND (d, 1); 7086 b = maybe_lookup_decl_in_outer_ctx (b, ctx); 7087 } 7088 if (integer_zerop (bias)) 7089 bias = b; 7090 else 7091 { 7092 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias); 7093 bias = fold_build2_loc (clause_loc, PLUS_EXPR, 7094 TREE_TYPE (b), b, bias); 7095 } 7096 d = TREE_OPERAND (d, 0); 7097 } 7098 /* For ref build_outer_var_ref already performs this, so 7099 only new_var needs a dereference. 
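That is, build_outer_var_ref has already wrapped REF in the needed dereference, while NEW_VAR is still the reference itself until the INDIRECT_REF case below adds the explicit MEM_REF.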
*/ 7100 if (TREE_CODE (d) == INDIRECT_REF) 7101 { 7102 new_var = build_simple_mem_ref_loc (clause_loc, new_var); 7103 gcc_assert (omp_is_reference (var) && var == orig_var); 7104 } 7105 else if (TREE_CODE (d) == ADDR_EXPR) 7106 { 7107 if (orig_var == var) 7108 { 7109 new_var = build_fold_addr_expr (new_var); 7110 ref = build_fold_addr_expr (ref); 7111 } 7112 } 7113 else 7114 { 7115 gcc_assert (orig_var == var); 7116 if (omp_is_reference (var)) 7117 ref = build_fold_addr_expr (ref); 7118 } 7119 if (DECL_P (v)) 7120 { 7121 tree t = maybe_lookup_decl (v, ctx); 7122 if (t) 7123 v = t; 7124 else 7125 v = maybe_lookup_decl_in_outer_ctx (v, ctx); 7126 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue); 7127 } 7128 if (!integer_zerop (bias)) 7129 { 7130 bias = fold_convert_loc (clause_loc, sizetype, bias); 7131 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR, 7132 TREE_TYPE (new_var), new_var, 7133 unshare_expr (bias)); 7134 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR, 7135 TREE_TYPE (ref), ref, bias); 7136 } 7137 new_var = fold_convert_loc (clause_loc, ptype, new_var); 7138 ref = fold_convert_loc (clause_loc, ptype, ref); 7139 tree m = create_tmp_var (ptype); 7140 gimplify_assign (m, new_var, stmt_seqp); 7141 new_var = m; 7142 m = create_tmp_var (ptype); 7143 gimplify_assign (m, ref, stmt_seqp); 7144 ref = m; 7145 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp); 7146 tree body = create_artificial_label (UNKNOWN_LOCATION); 7147 tree end = create_artificial_label (UNKNOWN_LOCATION); 7148 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body)); 7149 tree priv = build_simple_mem_ref_loc (clause_loc, new_var); 7150 tree out = build_simple_mem_ref_loc (clause_loc, ref); 7151 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)) 7152 { 7153 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c); 7154 tree decl_placeholder 7155 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c); 7156 SET_DECL_VALUE_EXPR (placeholder, out); 7157 DECL_HAS_VALUE_EXPR_P (placeholder) = 1; 7158 SET_DECL_VALUE_EXPR (decl_placeholder, priv); 7159 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1; 7160 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx); 7161 gimple_seq_add_seq (&sub_seq, 7162 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)); 7163 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL; 7164 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL; 7165 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL; 7166 } 7167 else 7168 { 7169 tree out2 = out; 7170 tree priv2 = priv; 7171 if (is_truth_op) 7172 { 7173 tree zero = build_zero_cst (TREE_TYPE (out)); 7174 out2 = fold_build2_loc (clause_loc, NE_EXPR, 7175 boolean_type_node, out, zero); 7176 priv2 = fold_build2_loc (clause_loc, NE_EXPR, 7177 boolean_type_node, priv, zero); 7178 } 7179 x = build2 (code, TREE_TYPE (out2), out2, priv2); 7180 if (is_truth_op) 7181 x = fold_convert (TREE_TYPE (out), x); 7182 out = unshare_expr (out); 7183 gimplify_assign (out, x, &sub_seq); 7184 } 7185 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var, 7186 TYPE_SIZE_UNIT (TREE_TYPE (type))); 7187 gimple_seq_add_stmt (&sub_seq, g); 7188 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref, 7189 TYPE_SIZE_UNIT (TREE_TYPE (type))); 7190 gimple_seq_add_stmt (&sub_seq, g); 7191 g = gimple_build_assign (i, PLUS_EXPR, i, 7192 build_int_cst (TREE_TYPE (i), 1)); 7193 gimple_seq_add_stmt (&sub_seq, g); 7194 g = gimple_build_cond (LE_EXPR, i, v, body, end); 7195 gimple_seq_add_stmt (&sub_seq, g); 7196 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end)); 7197 } 7198 else if 
(OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)) 7199 { 7200 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c); 7201 7202 if (omp_is_reference (var) 7203 && !useless_type_conversion_p (TREE_TYPE (placeholder), 7204 TREE_TYPE (ref))) 7205 ref = build_fold_addr_expr_loc (clause_loc, ref); 7206 SET_DECL_VALUE_EXPR (placeholder, ref); 7207 DECL_HAS_VALUE_EXPR_P (placeholder) = 1; 7208 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx); 7209 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)); 7210 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL; 7211 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL; 7212 } 7213 else 7214 { 7215 tree new_var2 = new_var; 7216 tree ref2 = ref; 7217 if (is_truth_op) 7218 { 7219 tree zero = build_zero_cst (TREE_TYPE (new_var)); 7220 new_var2 = fold_build2_loc (clause_loc, NE_EXPR, 7221 boolean_type_node, new_var, zero); 7222 ref2 = fold_build2_loc (clause_loc, NE_EXPR, boolean_type_node, 7223 ref, zero); 7224 } 7225 x = build2 (code, TREE_TYPE (ref), ref2, new_var2); 7226 if (is_truth_op) 7227 x = fold_convert (TREE_TYPE (new_var), x); 7228 ref = build_outer_var_ref (var, ctx); 7229 gimplify_assign (ref, x, &sub_seq); 7230 } 7231 } 7232 7233 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START), 7234 0); 7235 gimple_seq_add_stmt (stmt_seqp, stmt); 7236 7237 gimple_seq_add_seq (stmt_seqp, sub_seq); 7238 7239 if (clist) 7240 { 7241 gimple_seq_add_seq (stmt_seqp, *clist); 7242 *clist = NULL; 7243 } 7244 7245 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END), 7246 0); 7247 gimple_seq_add_stmt (stmt_seqp, stmt); 7248 } 7249 7250 7251 /* Generate code to implement the COPYPRIVATE clauses. */ 7252 7253 static void 7254 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist, 7255 omp_context *ctx) 7256 { 7257 tree c; 7258 7259 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c)) 7260 { 7261 tree var, new_var, ref, x; 7262 bool by_ref; 7263 location_t clause_loc = OMP_CLAUSE_LOCATION (c); 7264 7265 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE) 7266 continue; 7267 7268 var = OMP_CLAUSE_DECL (c); 7269 by_ref = use_pointer_for_field (var, NULL); 7270 7271 ref = build_sender_ref (var, ctx); 7272 x = new_var = lookup_decl_in_outer_ctx (var, ctx); 7273 if (by_ref) 7274 { 7275 x = build_fold_addr_expr_loc (clause_loc, new_var); 7276 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x); 7277 } 7278 gimplify_assign (ref, x, slist); 7279 7280 ref = build_receiver_ref (var, false, ctx); 7281 if (by_ref) 7282 { 7283 ref = fold_convert_loc (clause_loc, 7284 build_pointer_type (TREE_TYPE (new_var)), 7285 ref); 7286 ref = build_fold_indirect_ref_loc (clause_loc, ref); 7287 } 7288 if (omp_is_reference (var)) 7289 { 7290 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref); 7291 ref = build_simple_mem_ref_loc (clause_loc, ref); 7292 new_var = build_simple_mem_ref_loc (clause_loc, new_var); 7293 } 7294 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref); 7295 gimplify_and_add (x, rlist); 7296 } 7297 } 7298 7299 7300 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE, 7301 and REDUCTION from the sender (aka parent) side. */ 7302 7303 static void 7304 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist, 7305 omp_context *ctx) 7306 { 7307 tree c, t; 7308 int ignored_looptemp = 0; 7309 bool is_taskloop = false; 7310 7311 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized 7312 by GOMP_taskloop. 
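(They receive the start and end of the iteration subrange that the runtime assigns to each generated task, so the sender must not overwrite them here.)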
*/ 7313 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt)) 7314 { 7315 ignored_looptemp = 2; 7316 is_taskloop = true; 7317 } 7318 7319 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c)) 7320 { 7321 tree val, ref, x, var; 7322 bool by_ref, do_in = false, do_out = false; 7323 location_t clause_loc = OMP_CLAUSE_LOCATION (c); 7324 7325 switch (OMP_CLAUSE_CODE (c)) 7326 { 7327 case OMP_CLAUSE_PRIVATE: 7328 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c)) 7329 break; 7330 continue; 7331 case OMP_CLAUSE_FIRSTPRIVATE: 7332 case OMP_CLAUSE_COPYIN: 7333 case OMP_CLAUSE_LASTPRIVATE: 7334 case OMP_CLAUSE_IN_REDUCTION: 7335 case OMP_CLAUSE__REDUCTEMP_: 7336 break; 7337 case OMP_CLAUSE_REDUCTION: 7338 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c)) 7339 continue; 7340 break; 7341 case OMP_CLAUSE_SHARED: 7342 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)) 7343 break; 7344 continue; 7345 case OMP_CLAUSE__LOOPTEMP_: 7346 if (ignored_looptemp) 7347 { 7348 ignored_looptemp--; 7349 continue; 7350 } 7351 break; 7352 default: 7353 continue; 7354 } 7355 7356 val = OMP_CLAUSE_DECL (c); 7357 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION 7358 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION) 7359 && TREE_CODE (val) == MEM_REF) 7360 { 7361 val = TREE_OPERAND (val, 0); 7362 if (TREE_CODE (val) == POINTER_PLUS_EXPR) 7363 val = TREE_OPERAND (val, 0); 7364 if (TREE_CODE (val) == INDIRECT_REF 7365 || TREE_CODE (val) == ADDR_EXPR) 7366 val = TREE_OPERAND (val, 0); 7367 if (is_variable_sized (val)) 7368 continue; 7369 } 7370 7371 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the 7372 outer taskloop region. */ 7373 omp_context *ctx_for_o = ctx; 7374 if (is_taskloop 7375 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED 7376 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)) 7377 ctx_for_o = ctx->outer; 7378 7379 var = lookup_decl_in_outer_ctx (val, ctx_for_o); 7380 7381 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN 7382 && is_global_var (var) 7383 && (val == OMP_CLAUSE_DECL (c) 7384 || !is_task_ctx (ctx) 7385 || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE 7386 && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE 7387 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val))) 7388 != POINTER_TYPE))))) 7389 continue; 7390 7391 t = omp_member_access_dummy_var (var); 7392 if (t) 7393 { 7394 var = DECL_VALUE_EXPR (var); 7395 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o); 7396 if (o != t) 7397 var = unshare_and_remap (var, t, o); 7398 else 7399 var = unshare_expr (var); 7400 } 7401 7402 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED) 7403 { 7404 /* Handle taskloop firstprivate/lastprivate, where the 7405 lastprivate on GIMPLE_OMP_TASK is represented as 7406 OMP_CLAUSE_SHARED_FIRSTPRIVATE. 
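   E.g., as a rough sketch of intent,

     #pragma omp taskloop lastprivate (x)

   passes the outer X to each task through a field of the srecord built
   below, so that the logically last iteration can store the result back.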
*/ 7407 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
7408 x = omp_build_component_ref (ctx->sender_decl, f);
7409 if (use_pointer_for_field (val, ctx))
7410 var = build_fold_addr_expr (var);
7411 gimplify_assign (x, var, ilist);
7412 DECL_ABSTRACT_ORIGIN (f) = NULL;
7413 continue;
7414 }
7415
7416 if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
7417 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
7418 || val == OMP_CLAUSE_DECL (c))
7419 && is_variable_sized (val))
7420 continue;
7421 by_ref = use_pointer_for_field (val, NULL);
7422
7423 switch (OMP_CLAUSE_CODE (c))
7424 {
7425 case OMP_CLAUSE_FIRSTPRIVATE:
7426 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
7427 && !by_ref
7428 && is_task_ctx (ctx))
7429 TREE_NO_WARNING (var) = 1;
7430 do_in = true;
7431 break;
7432
7433 case OMP_CLAUSE_PRIVATE:
7434 case OMP_CLAUSE_COPYIN:
7435 case OMP_CLAUSE__LOOPTEMP_:
7436 case OMP_CLAUSE__REDUCTEMP_:
7437 do_in = true;
7438 break;
7439
7440 case OMP_CLAUSE_LASTPRIVATE:
7441 if (by_ref || omp_is_reference (val))
7442 {
7443 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
7444 continue;
7445 do_in = true;
7446 }
7447 else
7448 {
7449 do_out = true;
7450 if (lang_hooks.decls.omp_private_outer_ref (val))
7451 do_in = true;
7452 }
7453 break;
7454
7455 case OMP_CLAUSE_REDUCTION:
7456 case OMP_CLAUSE_IN_REDUCTION:
7457 do_in = true;
7458 if (val == OMP_CLAUSE_DECL (c))
7459 {
7460 if (is_task_ctx (ctx))
7461 by_ref = use_pointer_for_field (val, ctx);
7462 else
7463 do_out = !(by_ref || omp_is_reference (val));
7464 }
7465 else
7466 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
7467 break;
7468
7469 default:
7470 gcc_unreachable ();
7471 }
7472
7473 if (do_in)
7474 {
7475 ref = build_sender_ref (val, ctx);
7476 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
7477 gimplify_assign (ref, x, ilist);
7478 if (is_task_ctx (ctx))
7479 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
7480 }
7481
7482 if (do_out)
7483 {
7484 ref = build_sender_ref (val, ctx);
7485 gimplify_assign (var, ref, olist);
7486 }
7487 }
7488 }
7489
7490 /* Generate code to implement SHARED from the sender (aka parent)
7491 side. This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
7492 list things that got automatically shared. */
7493
7494 static void
7495 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
7496 {
7497 tree var, ovar, nvar, t, f, x, record_type;
7498
7499 if (ctx->record_type == NULL)
7500 return;
7501
7502 record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
7503 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
7504 {
7505 ovar = DECL_ABSTRACT_ORIGIN (f);
7506 if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
7507 continue;
7508
7509 nvar = maybe_lookup_decl (ovar, ctx);
7510 if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
7511 continue;
7512
7513 /* If CTX is a nested parallel directive, find the immediately
7514 enclosing parallel or workshare construct that contains a
7515 mapping for OVAR.
*/ 7516 var = lookup_decl_in_outer_ctx (ovar, ctx);
7517
7518 t = omp_member_access_dummy_var (var);
7519 if (t)
7520 {
7521 var = DECL_VALUE_EXPR (var);
7522 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
7523 if (o != t)
7524 var = unshare_and_remap (var, t, o);
7525 else
7526 var = unshare_expr (var);
7527 }
7528
7529 if (use_pointer_for_field (ovar, ctx))
7530 {
7531 x = build_sender_ref (ovar, ctx);
7532 if (TREE_CODE (TREE_TYPE (f)) == ARRAY_TYPE
7533 && TREE_TYPE (f) == TREE_TYPE (ovar))
7534 {
7535 gcc_assert (is_parallel_ctx (ctx)
7536 && DECL_ARTIFICIAL (ovar));
7537 /* _condtemp_ clause. */
7538 var = build_constructor (TREE_TYPE (x), NULL);
7539 }
7540 else
7541 var = build_fold_addr_expr (var);
7542 gimplify_assign (x, var, ilist);
7543 }
7544 else
7545 {
7546 x = build_sender_ref (ovar, ctx);
7547 gimplify_assign (x, var, ilist);
7548
7549 if (!TREE_READONLY (var)
7550 /* We don't need to receive a new reference to a result
7551 or parm decl. In fact we may not store to it as we will
7552 invalidate any pending RSO and generate wrong gimple
7553 during inlining. */
7554 && !((TREE_CODE (var) == RESULT_DECL
7555 || TREE_CODE (var) == PARM_DECL)
7556 && DECL_BY_REFERENCE (var)))
7557 {
7558 x = build_sender_ref (ovar, ctx);
7559 gimplify_assign (var, x, olist);
7560 }
7561 }
7562 }
7563 }
7564
7565 /* Emit an OpenACC head marker call, encapsulating the partitioning and
7566 other information that must be processed by the target compiler.
7567 Return the maximum number of dimensions the associated loop might
7568 be partitioned over. */
7569
7570 static unsigned
7571 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
7572 gimple_seq *seq, omp_context *ctx)
7573 {
7574 unsigned levels = 0;
7575 unsigned tag = 0;
7576 tree gang_static = NULL_TREE;
7577 auto_vec<tree, 5> args;
7578
7579 args.quick_push (build_int_cst
7580 (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
7581 args.quick_push (ddvar);
7582 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7583 {
7584 switch (OMP_CLAUSE_CODE (c))
7585 {
7586 case OMP_CLAUSE_GANG:
7587 tag |= OLF_DIM_GANG;
7588 gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
7589 /* static:* is represented by -1, and we can ignore it, as
7590 scheduling is always static. */
7591 if (gang_static && integer_minus_onep (gang_static))
7592 gang_static = NULL_TREE;
7593 levels++;
7594 break;
7595
7596 case OMP_CLAUSE_WORKER:
7597 tag |= OLF_DIM_WORKER;
7598 levels++;
7599 break;
7600
7601 case OMP_CLAUSE_VECTOR:
7602 tag |= OLF_DIM_VECTOR;
7603 levels++;
7604 break;
7605
7606 case OMP_CLAUSE_SEQ:
7607 tag |= OLF_SEQ;
7608 break;
7609
7610 case OMP_CLAUSE_AUTO:
7611 tag |= OLF_AUTO;
7612 break;
7613
7614 case OMP_CLAUSE_INDEPENDENT:
7615 tag |= OLF_INDEPENDENT;
7616 break;
7617
7618 case OMP_CLAUSE_TILE:
7619 tag |= OLF_TILE;
7620 break;
7621
7622 default:
7623 continue;
7624 }
7625 }
7626
7627 if (gang_static)
7628 {
7629 if (DECL_P (gang_static))
7630 gang_static = build_outer_var_ref (gang_static, ctx);
7631 tag |= OLF_GANG_STATIC;
7632 }
7633
7634 omp_context *tgt = enclosing_target_ctx (ctx);
7635 if (!tgt || is_oacc_parallel_or_serial (tgt))
7636 ;
7637 else if (is_oacc_kernels (tgt))
7638 /* This loop handling is not used inside OpenACC 'kernels' regions. */
7639 gcc_unreachable ();
7640 else
7641 gcc_unreachable ();
7642
7643 /* In a parallel region, loops are implicitly INDEPENDENT. */
7644 if (!tgt || is_oacc_parallel_or_serial (tgt))
7645 tag |= OLF_INDEPENDENT;
7646
7647 if (tag & OLF_TILE)
7648 /* Tiling could use all 3 levels.
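   (gang, worker and vector: e.g. a loop nest under
   '#pragma acc loop tile (8, 8)' may be assigned any of them by the
   target compiler, so be conservative here.)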
*/ 7649 levels = 3;
7650 else
7651 {
7652 /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
7653 Ensure at least one level, or 2 for possible auto
7654 partitioning. */
7655 bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
7656 << OLF_DIM_BASE) | OLF_SEQ));
7657
7658 if (levels < 1u + maybe_auto)
7659 levels = 1u + maybe_auto;
7660 }
7661
7662 args.quick_push (build_int_cst (integer_type_node, levels));
7663 args.quick_push (build_int_cst (integer_type_node, tag));
7664 if (gang_static)
7665 args.quick_push (gang_static);
7666
7667 gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
7668 gimple_set_location (call, loc);
7669 gimple_set_lhs (call, ddvar);
7670 gimple_seq_add_stmt (seq, call);
7671
7672 return levels;
7673 }
7674
7675 /* Emit an OpenACC loop head or tail marker to SEQ. TOFOLLOW, if
7676 non-NULL, is the partitioning level of the enclosed region. */
7677
7678 static void
7679 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
7680 tree tofollow, gimple_seq *seq)
7681 {
7682 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
7683 : IFN_UNIQUE_OACC_TAIL_MARK);
7684 tree marker = build_int_cst (integer_type_node, marker_kind);
7685 int nargs = 2 + (tofollow != NULL_TREE);
7686 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
7687 marker, ddvar, tofollow);
7688 gimple_set_location (call, loc);
7689 gimple_set_lhs (call, ddvar);
7690 gimple_seq_add_stmt (seq, call);
7691 }
7692
7693 /* Generate the before and after OpenACC loop sequences. CLAUSES are
7694 the loop clauses, from which we extract reductions. Initialize
7695 HEAD and TAIL. */
7696
7697 static void
7698 lower_oacc_head_tail (location_t loc, tree clauses,
7699 gimple_seq *head, gimple_seq *tail, omp_context *ctx)
7700 {
7701 bool inner = false;
7702 tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
7703 gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
7704
7705 unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
7706 tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
7707 tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
7708
7709 gcc_assert (count);
7710 for (unsigned done = 1; count; count--, done++)
7711 {
7712 gimple_seq fork_seq = NULL;
7713 gimple_seq join_seq = NULL;
7714
7715 tree place = build_int_cst (integer_type_node, -1);
7716 gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
7717 fork_kind, ddvar, place);
7718 gimple_set_location (fork, loc);
7719 gimple_set_lhs (fork, ddvar);
7720
7721 gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
7722 join_kind, ddvar, place);
7723 gimple_set_location (join, loc);
7724 gimple_set_lhs (join, ddvar);
7725
7726 /* Mark the beginning of this level sequence. */
7727 if (inner)
7728 lower_oacc_loop_marker (loc, ddvar, true,
7729 build_int_cst (integer_type_node, count),
7730 &fork_seq);
7731 lower_oacc_loop_marker (loc, ddvar, false,
7732 build_int_cst (integer_type_node, done),
7733 &join_seq);
7734
7735 lower_oacc_reductions (loc, clauses, place, inner,
7736 fork, join, &fork_seq, &join_seq, ctx);
7737
7738 /* Append this level to head. */
7739 gimple_seq_add_seq (head, fork_seq);
7740 /* Prepend it to tail. */
7741 gimple_seq_add_seq (&join_seq, *tail);
7742 *tail = join_seq;
7743
7744 inner = true;
7745 }
7746
7747 /* Mark the end of the sequence.
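   For two partitioning levels the overall result is then roughly (a
   sketch, not the exact GIMPLE):

     HEAD: .data_dep = 0;
	   IFN_UNIQUE (OACC_HEAD_MARK, ...); IFN_UNIQUE (OACC_FORK, ...);
	   IFN_UNIQUE (OACC_HEAD_MARK, ...); IFN_UNIQUE (OACC_FORK, ...);
	   IFN_UNIQUE (OACC_HEAD_MARK);
     TAIL: IFN_UNIQUE (OACC_TAIL_MARK, ...); IFN_UNIQUE (OACC_JOIN, ...);
	   IFN_UNIQUE (OACC_TAIL_MARK, ...); IFN_UNIQUE (OACC_JOIN, ...);
	   IFN_UNIQUE (OACC_TAIL_MARK);

   with any reduction setup and teardown interleaved next to each fork
   and join.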
*/ 7748 lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head); 7749 lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail); 7750 } 7751 7752 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW 7753 catch handler and return it. This prevents programs from violating the 7754 structured block semantics with throws. */ 7755 7756 static gimple_seq 7757 maybe_catch_exception (gimple_seq body) 7758 { 7759 gimple *g; 7760 tree decl; 7761 7762 if (!flag_exceptions) 7763 return body; 7764 7765 if (lang_hooks.eh_protect_cleanup_actions != NULL) 7766 decl = lang_hooks.eh_protect_cleanup_actions (); 7767 else 7768 decl = builtin_decl_explicit (BUILT_IN_TRAP); 7769 7770 g = gimple_build_eh_must_not_throw (decl); 7771 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g), 7772 GIMPLE_TRY_CATCH); 7773 7774 return gimple_seq_alloc_with_stmt (g); 7775 } 7776 7777 7778 /* Routines to lower OMP directives into OMP-GIMPLE. */ 7779 7780 /* If ctx is a worksharing context inside of a cancellable parallel 7781 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN 7782 and conditional branch to parallel's cancel_label to handle 7783 cancellation in the implicit barrier. */ 7784 7785 static void 7786 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return, 7787 gimple_seq *body) 7788 { 7789 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN); 7790 if (gimple_omp_return_nowait_p (omp_return)) 7791 return; 7792 for (omp_context *outer = ctx->outer; outer; outer = outer->outer) 7793 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL 7794 && outer->cancellable) 7795 { 7796 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL); 7797 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl)); 7798 tree lhs = create_tmp_var (c_bool_type); 7799 gimple_omp_return_set_lhs (omp_return, lhs); 7800 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION); 7801 gimple *g = gimple_build_cond (NE_EXPR, lhs, 7802 fold_convert (c_bool_type, 7803 boolean_false_node), 7804 outer->cancel_label, fallthru_label); 7805 gimple_seq_add_stmt (body, g); 7806 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label)); 7807 } 7808 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP) 7809 return; 7810 } 7811 7812 /* Find the first task_reduction or reduction clause or return NULL 7813 if there are none. */ 7814 7815 static inline tree 7816 omp_task_reductions_find_first (tree clauses, enum tree_code code, 7817 enum omp_clause_code ccode) 7818 { 7819 while (1) 7820 { 7821 clauses = omp_find_clause (clauses, ccode); 7822 if (clauses == NULL_TREE) 7823 return NULL_TREE; 7824 if (ccode != OMP_CLAUSE_REDUCTION 7825 || code == OMP_TASKLOOP 7826 || OMP_CLAUSE_REDUCTION_TASK (clauses)) 7827 return clauses; 7828 clauses = OMP_CLAUSE_CHAIN (clauses); 7829 } 7830 } 7831 7832 static void lower_omp_task_reductions (omp_context *, enum tree_code, tree, 7833 gimple_seq *, gimple_seq *); 7834 7835 /* Lower the OpenMP sections directive in the current statement in GSI_P. 7836 CTX is the enclosing OMP context for the current statement. 
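   The replacement has roughly the shape (a sketch, not the exact GIMPLE):

     <clause initialization (ilist)>
     GIMPLE_OMP_SECTIONS <control = .section>
     GIMPLE_OMP_SECTIONS_SWITCH
     <each lowered section body, terminated by GIMPLE_OMP_RETURN>
     GIMPLE_OMP_CONTINUE <control>
     <reductions and destructors (olist, dlist)>
     GIMPLE_OMP_RETURN [nowait]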
*/ 7837 7838 static void 7839 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx) 7840 { 7841 tree block, control; 7842 gimple_stmt_iterator tgsi; 7843 gomp_sections *stmt; 7844 gimple *t; 7845 gbind *new_stmt, *bind; 7846 gimple_seq ilist, dlist, olist, tred_dlist = NULL, clist = NULL, new_body; 7847 7848 stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p)); 7849 7850 push_gimplify_context (); 7851 7852 dlist = NULL; 7853 ilist = NULL; 7854 7855 tree rclauses 7856 = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt), 7857 OMP_SECTIONS, OMP_CLAUSE_REDUCTION); 7858 tree rtmp = NULL_TREE; 7859 if (rclauses) 7860 { 7861 tree type = build_pointer_type (pointer_sized_int_node); 7862 tree temp = create_tmp_var (type); 7863 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_); 7864 OMP_CLAUSE_DECL (c) = temp; 7865 OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt); 7866 gimple_omp_sections_set_clauses (stmt, c); 7867 lower_omp_task_reductions (ctx, OMP_SECTIONS, 7868 gimple_omp_sections_clauses (stmt), 7869 &ilist, &tred_dlist); 7870 rclauses = c; 7871 rtmp = make_ssa_name (type); 7872 gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp)); 7873 } 7874 7875 tree *clauses_ptr = gimple_omp_sections_clauses_ptr (stmt); 7876 lower_lastprivate_conditional_clauses (clauses_ptr, ctx); 7877 7878 lower_rec_input_clauses (gimple_omp_sections_clauses (stmt), 7879 &ilist, &dlist, ctx, NULL); 7880 7881 control = create_tmp_var (unsigned_type_node, ".section"); 7882 gimple_omp_sections_set_control (stmt, control); 7883 7884 new_body = gimple_omp_body (stmt); 7885 gimple_omp_set_body (stmt, NULL); 7886 tgsi = gsi_start (new_body); 7887 for (; !gsi_end_p (tgsi); gsi_next (&tgsi)) 7888 { 7889 omp_context *sctx; 7890 gimple *sec_start; 7891 7892 sec_start = gsi_stmt (tgsi); 7893 sctx = maybe_lookup_ctx (sec_start); 7894 gcc_assert (sctx); 7895 7896 lower_omp (gimple_omp_body_ptr (sec_start), sctx); 7897 gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start), 7898 GSI_CONTINUE_LINKING); 7899 gimple_omp_set_body (sec_start, NULL); 7900 7901 if (gsi_one_before_end_p (tgsi)) 7902 { 7903 gimple_seq l = NULL; 7904 lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL, 7905 &ilist, &l, &clist, ctx); 7906 gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING); 7907 gimple_omp_section_set_last (sec_start); 7908 } 7909 7910 gsi_insert_after (&tgsi, gimple_build_omp_return (false), 7911 GSI_CONTINUE_LINKING); 7912 } 7913 7914 block = make_node (BLOCK); 7915 bind = gimple_build_bind (NULL, new_body, block); 7916 7917 olist = NULL; 7918 lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist, 7919 &clist, ctx); 7920 if (clist) 7921 { 7922 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START); 7923 gcall *g = gimple_build_call (fndecl, 0); 7924 gimple_seq_add_stmt (&olist, g); 7925 gimple_seq_add_seq (&olist, clist); 7926 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END); 7927 g = gimple_build_call (fndecl, 0); 7928 gimple_seq_add_stmt (&olist, g); 7929 } 7930 7931 block = make_node (BLOCK); 7932 new_stmt = gimple_build_bind (NULL, NULL, block); 7933 gsi_replace (gsi_p, new_stmt, true); 7934 7935 pop_gimplify_context (new_stmt); 7936 gimple_bind_append_vars (new_stmt, ctx->block_vars); 7937 BLOCK_VARS (block) = gimple_bind_vars (bind); 7938 if (BLOCK_VARS (block)) 7939 TREE_USED (block) = 1; 7940 7941 new_body = NULL; 7942 gimple_seq_add_seq (&new_body, ilist); 7943 gimple_seq_add_stmt (&new_body, stmt); 7944 
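/* Follow the construct body with the sections switch, the bind holding
   the lowered section bodies, and the continue statement; pass_expand_omp
   later rewrites these into the dispatch loop driven by
   GOMP_sections_next. */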
gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
7945 gimple_seq_add_stmt (&new_body, bind);
7946
7947 t = gimple_build_omp_continue (control, control);
7948 gimple_seq_add_stmt (&new_body, t);
7949
7950 gimple_seq_add_seq (&new_body, olist);
7951 if (ctx->cancellable)
7952 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
7953 gimple_seq_add_seq (&new_body, dlist);
7954
7955 new_body = maybe_catch_exception (new_body);
7956
7957 bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
7958 OMP_CLAUSE_NOWAIT) != NULL_TREE;
7959 t = gimple_build_omp_return (nowait);
7960 gimple_seq_add_stmt (&new_body, t);
7961 gimple_seq_add_seq (&new_body, tred_dlist);
7962 maybe_add_implicit_barrier_cancel (ctx, t, &new_body);
7963
7964 if (rclauses)
7965 OMP_CLAUSE_DECL (rclauses) = rtmp;
7966
7967 gimple_bind_set_body (new_stmt, new_body);
7968 }
7969
7970
7971 /* A subroutine of lower_omp_single. Expand the simple form of
7972 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
7973
7974 if (GOMP_single_start ())
7975 BODY;
7976 [ GOMP_barrier (); ] -> unless 'nowait' is present.
7977
7978 FIXME. It may be better to delay expanding the logic of this until
7979 pass_expand_omp. The expanded logic may make the job more difficult
7980 for a synchronization analysis pass. */
7981
7982 static void
7983 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
7984 {
7985 location_t loc = gimple_location (single_stmt);
7986 tree tlabel = create_artificial_label (loc);
7987 tree flabel = create_artificial_label (loc);
7988 gimple *call, *cond;
7989 tree lhs, decl;
7990
7991 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
7992 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
7993 call = gimple_build_call (decl, 0);
7994 gimple_call_set_lhs (call, lhs);
7995 gimple_seq_add_stmt (pre_p, call);
7996
7997 cond = gimple_build_cond (EQ_EXPR, lhs,
7998 fold_convert_loc (loc, TREE_TYPE (lhs),
7999 boolean_true_node),
8000 tlabel, flabel);
8001 gimple_seq_add_stmt (pre_p, cond);
8002 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
8003 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
8004 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
8005 }
8006
8007
8008 /* A subroutine of lower_omp_single. Expand the simple form of
8009 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
8010
8011 #pragma omp single copyprivate (a, b, c)
8012
8013 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
8014
8015 {
8016 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
8017 {
8018 BODY;
8019 copyout.a = a;
8020 copyout.b = b;
8021 copyout.c = c;
8022 GOMP_single_copy_end (&copyout);
8023 }
8024 else
8025 {
8026 a = copyout_p->a;
8027 b = copyout_p->b;
8028 c = copyout_p->c;
8029 }
8030 GOMP_barrier ();
8031 }
8032
8033 FIXME. It may be better to delay expanding the logic of this until
8034 pass_expand_omp. The expanded logic may make the job more difficult
8035 for a synchronization analysis pass.
*/ 8036
8037 static void
8038 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
8039 omp_context *ctx)
8040 {
8041 tree ptr_type, t, l0, l1, l2, bfn_decl;
8042 gimple_seq copyin_seq;
8043 location_t loc = gimple_location (single_stmt);
8044
8045 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
8046
8047 ptr_type = build_pointer_type (ctx->record_type);
8048 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
8049
8050 l0 = create_artificial_label (loc);
8051 l1 = create_artificial_label (loc);
8052 l2 = create_artificial_label (loc);
8053
8054 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
8055 t = build_call_expr_loc (loc, bfn_decl, 0);
8056 t = fold_convert_loc (loc, ptr_type, t);
8057 gimplify_assign (ctx->receiver_decl, t, pre_p);
8058
8059 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
8060 build_int_cst (ptr_type, 0));
8061 t = build3 (COND_EXPR, void_type_node, t,
8062 build_and_jump (&l0), build_and_jump (&l1));
8063 gimplify_and_add (t, pre_p);
8064
8065 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
8066
8067 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
8068
8069 copyin_seq = NULL;
8070 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
8071 &copyin_seq, ctx);
8072
8073 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
8074 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
8075 t = build_call_expr_loc (loc, bfn_decl, 1, t);
8076 gimplify_and_add (t, pre_p);
8077
8078 t = build_and_jump (&l2);
8079 gimplify_and_add (t, pre_p);
8080
8081 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
8082
8083 gimple_seq_add_seq (pre_p, copyin_seq);
8084
8085 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
8086 }
8087
8088
8089 /* Expand code for an OpenMP single directive.
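   The lowered construct is wrapped in a GIMPLE_BIND holding the clause
   setup, the single statement with its body expanded by one of the two
   subroutines above, and a closing GIMPLE_OMP_RETURN, which carries the
   implicit barrier unless 'nowait' was given.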
*/ 8090
8091 static void
8092 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8093 {
8094 tree block;
8095 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
8096 gbind *bind;
8097 gimple_seq bind_body, bind_body_tail = NULL, dlist;
8098
8099 push_gimplify_context ();
8100
8101 block = make_node (BLOCK);
8102 bind = gimple_build_bind (NULL, NULL, block);
8103 gsi_replace (gsi_p, bind, true);
8104 bind_body = NULL;
8105 dlist = NULL;
8106 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
8107 &bind_body, &dlist, ctx, NULL);
8108 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
8109
8110 gimple_seq_add_stmt (&bind_body, single_stmt);
8111
8112 if (ctx->record_type)
8113 lower_omp_single_copy (single_stmt, &bind_body, ctx);
8114 else
8115 lower_omp_single_simple (single_stmt, &bind_body);
8116
8117 gimple_omp_set_body (single_stmt, NULL);
8118
8119 gimple_seq_add_seq (&bind_body, dlist);
8120
8121 bind_body = maybe_catch_exception (bind_body);
8122
8123 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
8124 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8125 gimple *g = gimple_build_omp_return (nowait);
8126 gimple_seq_add_stmt (&bind_body_tail, g);
8127 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
8128 if (ctx->record_type)
8129 {
8130 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
8131 tree clobber = build_clobber (ctx->record_type);
8132 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
8133 clobber), GSI_SAME_STMT);
8134 }
8135 gimple_seq_add_seq (&bind_body, bind_body_tail);
8136 gimple_bind_set_body (bind, bind_body);
8137
8138 pop_gimplify_context (bind);
8139
8140 gimple_bind_append_vars (bind, ctx->block_vars);
8141 BLOCK_VARS (block) = ctx->block_vars;
8142 if (BLOCK_VARS (block))
8143 TREE_USED (block) = 1;
8144 }
8145
8146
8147 /* Expand code for an OpenMP master directive. */
8148
8149 static void
8150 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8151 {
8152 tree block, lab = NULL, x, bfn_decl;
8153 gimple *stmt = gsi_stmt (*gsi_p);
8154 gbind *bind;
8155 location_t loc = gimple_location (stmt);
8156 gimple_seq tseq;
8157
8158 push_gimplify_context ();
8159
8160 block = make_node (BLOCK);
8161 bind = gimple_build_bind (NULL, NULL, block);
8162 gsi_replace (gsi_p, bind, true);
8163 gimple_bind_add_stmt (bind, stmt);
8164
8165 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
8166 x = build_call_expr_loc (loc, bfn_decl, 0);
8167 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
8168 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
8169 tseq = NULL;
8170 gimplify_and_add (x, &tseq);
8171 gimple_bind_add_seq (bind, tseq);
8172
8173 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8174 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8175 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8176 gimple_omp_set_body (stmt, NULL);
8177
8178 gimple_bind_add_stmt (bind, gimple_build_label (lab));
8179
8180 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8181
8182 pop_gimplify_context (bind);
8183
8184 gimple_bind_append_vars (bind, ctx->block_vars);
8185 BLOCK_VARS (block) = ctx->block_vars;
8186 }
8187
8188 /* Helper function for lower_omp_task_reductions. For a specific PASS,
8189 find the next clause that should be processed, or return false
8190 if all have been processed already.
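   Two passes are made over the clause list: pass 0 handles reductions
   whose private copy has constant size, pass 1 the MEM_REF (array
   section) and variable-sized ones.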
*/ 8191 8192 static inline bool 8193 omp_task_reduction_iterate (int pass, enum tree_code code, 8194 enum omp_clause_code ccode, tree *c, tree *decl, 8195 tree *type, tree *next) 8196 { 8197 for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode)) 8198 { 8199 if (ccode == OMP_CLAUSE_REDUCTION 8200 && code != OMP_TASKLOOP 8201 && !OMP_CLAUSE_REDUCTION_TASK (*c)) 8202 continue; 8203 *decl = OMP_CLAUSE_DECL (*c); 8204 *type = TREE_TYPE (*decl); 8205 if (TREE_CODE (*decl) == MEM_REF) 8206 { 8207 if (pass != 1) 8208 continue; 8209 } 8210 else 8211 { 8212 if (omp_is_reference (*decl)) 8213 *type = TREE_TYPE (*type); 8214 if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type)))) 8215 continue; 8216 } 8217 *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode); 8218 return true; 8219 } 8220 *decl = NULL_TREE; 8221 *type = NULL_TREE; 8222 *next = NULL_TREE; 8223 return false; 8224 } 8225 8226 /* Lower task_reduction and reduction clauses (the latter unless CODE is 8227 OMP_TASKGROUP only with task modifier). Register mapping of those in 8228 START sequence and reducing them and unregister them in the END sequence. */ 8229 8230 static void 8231 lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses, 8232 gimple_seq *start, gimple_seq *end) 8233 { 8234 enum omp_clause_code ccode 8235 = (code == OMP_TASKGROUP 8236 ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION); 8237 tree cancellable = NULL_TREE; 8238 clauses = omp_task_reductions_find_first (clauses, code, ccode); 8239 if (clauses == NULL_TREE) 8240 return; 8241 if (code == OMP_FOR || code == OMP_SECTIONS) 8242 { 8243 for (omp_context *outer = ctx->outer; outer; outer = outer->outer) 8244 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL 8245 && outer->cancellable) 8246 { 8247 cancellable = error_mark_node; 8248 break; 8249 } 8250 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP) 8251 break; 8252 } 8253 tree record_type = lang_hooks.types.make_type (RECORD_TYPE); 8254 tree *last = &TYPE_FIELDS (record_type); 8255 unsigned cnt = 0; 8256 if (cancellable) 8257 { 8258 tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE, 8259 ptr_type_node); 8260 tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE, 8261 integer_type_node); 8262 *last = field; 8263 DECL_CHAIN (field) = ifield; 8264 last = &DECL_CHAIN (ifield); 8265 DECL_CONTEXT (field) = record_type; 8266 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field)) 8267 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field)); 8268 DECL_CONTEXT (ifield) = record_type; 8269 if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield)) 8270 SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield)); 8271 } 8272 for (int pass = 0; pass < 2; pass++) 8273 { 8274 tree decl, type, next; 8275 for (tree c = clauses; 8276 omp_task_reduction_iterate (pass, code, ccode, 8277 &c, &decl, &type, &next); c = next) 8278 { 8279 ++cnt; 8280 tree new_type = type; 8281 if (ctx->outer) 8282 new_type = remap_type (type, &ctx->outer->cb); 8283 tree field 8284 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, 8285 DECL_P (decl) ? 
DECL_NAME (decl) : NULL_TREE, 8286 new_type); 8287 if (DECL_P (decl) && type == TREE_TYPE (decl)) 8288 { 8289 SET_DECL_ALIGN (field, DECL_ALIGN (decl)); 8290 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl); 8291 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl); 8292 } 8293 else 8294 SET_DECL_ALIGN (field, TYPE_ALIGN (type)); 8295 DECL_CONTEXT (field) = record_type; 8296 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field)) 8297 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field)); 8298 *last = field; 8299 last = &DECL_CHAIN (field); 8300 tree bfield 8301 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE, 8302 boolean_type_node); 8303 DECL_CONTEXT (bfield) = record_type; 8304 if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield)) 8305 SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield)); 8306 *last = bfield; 8307 last = &DECL_CHAIN (bfield); 8308 } 8309 } 8310 *last = NULL_TREE; 8311 layout_type (record_type); 8312 8313 /* Build up an array which registers with the runtime all the reductions 8314 and deregisters them at the end. Format documented in libgomp/task.c. */ 8315 tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3); 8316 tree avar = create_tmp_var_raw (atype); 8317 gimple_add_tmp_var (avar); 8318 TREE_ADDRESSABLE (avar) = 1; 8319 tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node, 8320 NULL_TREE, NULL_TREE); 8321 tree t = build_int_cst (pointer_sized_int_node, cnt); 8322 gimple_seq_add_stmt (start, gimple_build_assign (r, t)); 8323 gimple_seq seq = NULL; 8324 tree sz = fold_convert (pointer_sized_int_node, 8325 TYPE_SIZE_UNIT (record_type)); 8326 int cachesz = 64; 8327 sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz, 8328 build_int_cst (pointer_sized_int_node, cachesz - 1)); 8329 sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz, 8330 build_int_cst (pointer_sized_int_node, ~(cachesz - 1))); 8331 ctx->task_reductions.create (1 + cnt); 8332 ctx->task_reduction_map = new hash_map<tree, unsigned>; 8333 ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST 8334 ? sz : NULL_TREE); 8335 sz = force_gimple_operand (sz, &seq, true, NULL_TREE); 8336 gimple_seq_add_seq (start, seq); 8337 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node, 8338 NULL_TREE, NULL_TREE); 8339 gimple_seq_add_stmt (start, gimple_build_assign (r, sz)); 8340 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2), 8341 NULL_TREE, NULL_TREE); 8342 t = build_int_cst (pointer_sized_int_node, 8343 MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz)); 8344 gimple_seq_add_stmt (start, gimple_build_assign (r, t)); 8345 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3), 8346 NULL_TREE, NULL_TREE); 8347 t = build_int_cst (pointer_sized_int_node, -1); 8348 gimple_seq_add_stmt (start, gimple_build_assign (r, t)); 8349 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4), 8350 NULL_TREE, NULL_TREE); 8351 t = build_int_cst (pointer_sized_int_node, 0); 8352 gimple_seq_add_stmt (start, gimple_build_assign (r, t)); 8353 8354 /* In end, build a loop that iterates from 0 to < omp_get_num_threads () 8355 and for each task reduction checks a bool right after the private variable 8356 within that thread's chunk; if the bool is clear, it hasn't been 8357 initialized and thus isn't going to be reduced nor destructed, otherwise 8358 reduce and destruct it. 
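   Roughly, the code appended to the END sequence is (a sketch, not the
   exact GIMPLE):

     for (idx = 0, ptr = data; idx < num_thr_sz; idx++, ptr += sz)
       for each reduction clause R:
	 if (ptr->initialized_R)
	   { orig_R = orig_R OP ptr->priv_R; destruct (ptr->priv_R); }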
*/ 8359 tree idx = create_tmp_var (size_type_node); 8360 gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node)); 8361 tree num_thr_sz = create_tmp_var (size_type_node); 8362 tree lab1 = create_artificial_label (UNKNOWN_LOCATION); 8363 tree lab2 = create_artificial_label (UNKNOWN_LOCATION); 8364 tree lab3 = NULL_TREE, lab7 = NULL_TREE; 8365 gimple *g; 8366 if (code == OMP_FOR || code == OMP_SECTIONS) 8367 { 8368 /* For worksharing constructs, only perform it in the master thread, 8369 with the exception of cancelled implicit barriers - then only handle 8370 the current thread. */ 8371 tree lab4 = create_artificial_label (UNKNOWN_LOCATION); 8372 t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM); 8373 tree thr_num = create_tmp_var (integer_type_node); 8374 g = gimple_build_call (t, 0); 8375 gimple_call_set_lhs (g, thr_num); 8376 gimple_seq_add_stmt (end, g); 8377 if (cancellable) 8378 { 8379 tree c; 8380 tree lab5 = create_artificial_label (UNKNOWN_LOCATION); 8381 tree lab6 = create_artificial_label (UNKNOWN_LOCATION); 8382 lab3 = create_artificial_label (UNKNOWN_LOCATION); 8383 if (code == OMP_FOR) 8384 c = gimple_omp_for_clauses (ctx->stmt); 8385 else /* if (code == OMP_SECTIONS) */ 8386 c = gimple_omp_sections_clauses (ctx->stmt); 8387 c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_)); 8388 cancellable = c; 8389 g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)), 8390 lab5, lab6); 8391 gimple_seq_add_stmt (end, g); 8392 gimple_seq_add_stmt (end, gimple_build_label (lab5)); 8393 g = gimple_build_assign (idx, NOP_EXPR, thr_num); 8394 gimple_seq_add_stmt (end, g); 8395 g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx, 8396 build_one_cst (TREE_TYPE (idx))); 8397 gimple_seq_add_stmt (end, g); 8398 gimple_seq_add_stmt (end, gimple_build_goto (lab3)); 8399 gimple_seq_add_stmt (end, gimple_build_label (lab6)); 8400 } 8401 g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4); 8402 gimple_seq_add_stmt (end, g); 8403 gimple_seq_add_stmt (end, gimple_build_label (lab4)); 8404 } 8405 if (code != OMP_PARALLEL) 8406 { 8407 t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS); 8408 tree num_thr = create_tmp_var (integer_type_node); 8409 g = gimple_build_call (t, 0); 8410 gimple_call_set_lhs (g, num_thr); 8411 gimple_seq_add_stmt (end, g); 8412 g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr); 8413 gimple_seq_add_stmt (end, g); 8414 if (cancellable) 8415 gimple_seq_add_stmt (end, gimple_build_label (lab3)); 8416 } 8417 else 8418 { 8419 tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt), 8420 OMP_CLAUSE__REDUCTEMP_); 8421 t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c)); 8422 t = fold_convert (size_type_node, t); 8423 gimplify_assign (num_thr_sz, t, end); 8424 } 8425 t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2), 8426 NULL_TREE, NULL_TREE); 8427 tree data = create_tmp_var (pointer_sized_int_node); 8428 gimple_seq_add_stmt (end, gimple_build_assign (data, t)); 8429 if (code == OMP_TASKLOOP) 8430 { 8431 lab7 = create_artificial_label (UNKNOWN_LOCATION); 8432 g = gimple_build_cond (NE_EXPR, data, 8433 build_zero_cst (pointer_sized_int_node), 8434 lab1, lab7); 8435 gimple_seq_add_stmt (end, g); 8436 } 8437 gimple_seq_add_stmt (end, gimple_build_label (lab1)); 8438 tree ptr; 8439 if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST) 8440 ptr = create_tmp_var (build_pointer_type (record_type)); 8441 else 8442 ptr = create_tmp_var (ptr_type_node); 8443 gimple_seq_add_stmt 
(end, gimple_build_assign (ptr, NOP_EXPR, data)); 8444 8445 tree field = TYPE_FIELDS (record_type); 8446 cnt = 0; 8447 if (cancellable) 8448 field = DECL_CHAIN (DECL_CHAIN (field)); 8449 for (int pass = 0; pass < 2; pass++) 8450 { 8451 tree decl, type, next; 8452 for (tree c = clauses; 8453 omp_task_reduction_iterate (pass, code, ccode, 8454 &c, &decl, &type, &next); c = next) 8455 { 8456 tree var = decl, ref; 8457 if (TREE_CODE (decl) == MEM_REF) 8458 { 8459 var = TREE_OPERAND (var, 0); 8460 if (TREE_CODE (var) == POINTER_PLUS_EXPR) 8461 var = TREE_OPERAND (var, 0); 8462 tree v = var; 8463 if (TREE_CODE (var) == ADDR_EXPR) 8464 var = TREE_OPERAND (var, 0); 8465 else if (TREE_CODE (var) == INDIRECT_REF) 8466 var = TREE_OPERAND (var, 0); 8467 tree orig_var = var; 8468 if (is_variable_sized (var)) 8469 { 8470 gcc_assert (DECL_HAS_VALUE_EXPR_P (var)); 8471 var = DECL_VALUE_EXPR (var); 8472 gcc_assert (TREE_CODE (var) == INDIRECT_REF); 8473 var = TREE_OPERAND (var, 0); 8474 gcc_assert (DECL_P (var)); 8475 } 8476 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx); 8477 if (orig_var != var) 8478 gcc_assert (TREE_CODE (v) == ADDR_EXPR); 8479 else if (TREE_CODE (v) == ADDR_EXPR) 8480 t = build_fold_addr_expr (t); 8481 else if (TREE_CODE (v) == INDIRECT_REF) 8482 t = build_fold_indirect_ref (t); 8483 if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR) 8484 { 8485 tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1); 8486 b = maybe_lookup_decl_in_outer_ctx (b, ctx); 8487 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b); 8488 } 8489 if (!integer_zerop (TREE_OPERAND (decl, 1))) 8490 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, 8491 fold_convert (size_type_node, 8492 TREE_OPERAND (decl, 1))); 8493 } 8494 else 8495 { 8496 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx); 8497 if (!omp_is_reference (decl)) 8498 t = build_fold_addr_expr (t); 8499 } 8500 t = fold_convert (pointer_sized_int_node, t); 8501 seq = NULL; 8502 t = force_gimple_operand (t, &seq, true, NULL_TREE); 8503 gimple_seq_add_seq (start, seq); 8504 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, 8505 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE); 8506 gimple_seq_add_stmt (start, gimple_build_assign (r, t)); 8507 t = unshare_expr (byte_position (field)); 8508 t = fold_convert (pointer_sized_int_node, t); 8509 ctx->task_reduction_map->put (c, cnt); 8510 ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST 8511 ? t : NULL_TREE); 8512 seq = NULL; 8513 t = force_gimple_operand (t, &seq, true, NULL_TREE); 8514 gimple_seq_add_seq (start, seq); 8515 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, 8516 size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE); 8517 gimple_seq_add_stmt (start, gimple_build_assign (r, t)); 8518 8519 tree bfield = DECL_CHAIN (field); 8520 tree cond; 8521 if (code == OMP_PARALLEL || code == OMP_FOR || code == OMP_SECTIONS) 8522 /* In parallel or worksharing all threads unconditionally 8523 initialize all their task reduction private variables. 
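   Hence the per-entry flag is known to be set and the guard below is
   simply folded to true.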
*/ 8524 cond = boolean_true_node; 8525 else if (TREE_TYPE (ptr) == ptr_type_node) 8526 { 8527 cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr, 8528 unshare_expr (byte_position (bfield))); 8529 seq = NULL; 8530 cond = force_gimple_operand (cond, &seq, true, NULL_TREE); 8531 gimple_seq_add_seq (end, seq); 8532 tree pbool = build_pointer_type (TREE_TYPE (bfield)); 8533 cond = build2 (MEM_REF, TREE_TYPE (bfield), cond, 8534 build_int_cst (pbool, 0)); 8535 } 8536 else 8537 cond = build3 (COMPONENT_REF, TREE_TYPE (bfield), 8538 build_simple_mem_ref (ptr), bfield, NULL_TREE); 8539 tree lab3 = create_artificial_label (UNKNOWN_LOCATION); 8540 tree lab4 = create_artificial_label (UNKNOWN_LOCATION); 8541 tree condv = create_tmp_var (boolean_type_node); 8542 gimple_seq_add_stmt (end, gimple_build_assign (condv, cond)); 8543 g = gimple_build_cond (NE_EXPR, condv, boolean_false_node, 8544 lab3, lab4); 8545 gimple_seq_add_stmt (end, g); 8546 gimple_seq_add_stmt (end, gimple_build_label (lab3)); 8547 if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE) 8548 { 8549 /* If this reduction doesn't need destruction and parallel 8550 has been cancelled, there is nothing to do for this 8551 reduction, so jump around the merge operation. */ 8552 tree lab5 = create_artificial_label (UNKNOWN_LOCATION); 8553 g = gimple_build_cond (NE_EXPR, cancellable, 8554 build_zero_cst (TREE_TYPE (cancellable)), 8555 lab4, lab5); 8556 gimple_seq_add_stmt (end, g); 8557 gimple_seq_add_stmt (end, gimple_build_label (lab5)); 8558 } 8559 8560 tree new_var; 8561 if (TREE_TYPE (ptr) == ptr_type_node) 8562 { 8563 new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr, 8564 unshare_expr (byte_position (field))); 8565 seq = NULL; 8566 new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE); 8567 gimple_seq_add_seq (end, seq); 8568 tree pbool = build_pointer_type (TREE_TYPE (field)); 8569 new_var = build2 (MEM_REF, TREE_TYPE (field), new_var, 8570 build_int_cst (pbool, 0)); 8571 } 8572 else 8573 new_var = build3 (COMPONENT_REF, TREE_TYPE (field), 8574 build_simple_mem_ref (ptr), field, NULL_TREE); 8575 8576 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c); 8577 if (TREE_CODE (decl) != MEM_REF && omp_is_reference (decl)) 8578 ref = build_simple_mem_ref (ref); 8579 /* reduction(-:var) sums up the partial results, so it acts 8580 identically to reduction(+:var). 
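   E.g. with two partial results p1 and p2 the combined value is
   var + p1 + p2 in both cases.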
*/ 8581 if (rcode == MINUS_EXPR) 8582 rcode = PLUS_EXPR; 8583 if (TREE_CODE (decl) == MEM_REF) 8584 { 8585 tree type = TREE_TYPE (new_var); 8586 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type)); 8587 tree i = create_tmp_var (TREE_TYPE (v)); 8588 tree ptype = build_pointer_type (TREE_TYPE (type)); 8589 if (DECL_P (v)) 8590 { 8591 v = maybe_lookup_decl_in_outer_ctx (v, ctx); 8592 tree vv = create_tmp_var (TREE_TYPE (v)); 8593 gimplify_assign (vv, v, start); 8594 v = vv; 8595 } 8596 ref = build4 (ARRAY_REF, pointer_sized_int_node, avar, 8597 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE); 8598 new_var = build_fold_addr_expr (new_var); 8599 new_var = fold_convert (ptype, new_var); 8600 ref = fold_convert (ptype, ref); 8601 tree m = create_tmp_var (ptype); 8602 gimplify_assign (m, new_var, end); 8603 new_var = m; 8604 m = create_tmp_var (ptype); 8605 gimplify_assign (m, ref, end); 8606 ref = m; 8607 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end); 8608 tree body = create_artificial_label (UNKNOWN_LOCATION); 8609 tree endl = create_artificial_label (UNKNOWN_LOCATION); 8610 gimple_seq_add_stmt (end, gimple_build_label (body)); 8611 tree priv = build_simple_mem_ref (new_var); 8612 tree out = build_simple_mem_ref (ref); 8613 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)) 8614 { 8615 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c); 8616 tree decl_placeholder 8617 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c); 8618 tree lab6 = NULL_TREE; 8619 if (cancellable) 8620 { 8621 /* If this reduction needs destruction and parallel 8622 has been cancelled, jump around the merge operation 8623 to the destruction. */ 8624 tree lab5 = create_artificial_label (UNKNOWN_LOCATION); 8625 lab6 = create_artificial_label (UNKNOWN_LOCATION); 8626 tree zero = build_zero_cst (TREE_TYPE (cancellable)); 8627 g = gimple_build_cond (NE_EXPR, cancellable, zero, 8628 lab6, lab5); 8629 gimple_seq_add_stmt (end, g); 8630 gimple_seq_add_stmt (end, gimple_build_label (lab5)); 8631 } 8632 SET_DECL_VALUE_EXPR (placeholder, out); 8633 DECL_HAS_VALUE_EXPR_P (placeholder) = 1; 8634 SET_DECL_VALUE_EXPR (decl_placeholder, priv); 8635 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1; 8636 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx); 8637 gimple_seq_add_seq (end, 8638 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)); 8639 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL; 8640 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION) 8641 { 8642 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL; 8643 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL; 8644 } 8645 if (cancellable) 8646 gimple_seq_add_stmt (end, gimple_build_label (lab6)); 8647 tree x = lang_hooks.decls.omp_clause_dtor (c, priv); 8648 if (x) 8649 { 8650 gimple_seq tseq = NULL; 8651 gimplify_stmt (&x, &tseq); 8652 gimple_seq_add_seq (end, tseq); 8653 } 8654 } 8655 else 8656 { 8657 tree x = build2 (rcode, TREE_TYPE (out), out, priv); 8658 out = unshare_expr (out); 8659 gimplify_assign (out, x, end); 8660 } 8661 gimple *g 8662 = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var, 8663 TYPE_SIZE_UNIT (TREE_TYPE (type))); 8664 gimple_seq_add_stmt (end, g); 8665 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref, 8666 TYPE_SIZE_UNIT (TREE_TYPE (type))); 8667 gimple_seq_add_stmt (end, g); 8668 g = gimple_build_assign (i, PLUS_EXPR, i, 8669 build_int_cst (TREE_TYPE (i), 1)); 8670 gimple_seq_add_stmt (end, g); 8671 g = gimple_build_cond (LE_EXPR, i, v, body, endl); 8672 gimple_seq_add_stmt (end, g); 8673 gimple_seq_add_stmt (end, gimple_build_label (endl)); 8674 } 8675 else if 
(OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)) 8676 { 8677 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c); 8678 tree oldv = NULL_TREE; 8679 tree lab6 = NULL_TREE; 8680 if (cancellable) 8681 { 8682 /* If this reduction needs destruction and parallel 8683 has been cancelled, jump around the merge operation 8684 to the destruction. */ 8685 tree lab5 = create_artificial_label (UNKNOWN_LOCATION); 8686 lab6 = create_artificial_label (UNKNOWN_LOCATION); 8687 tree zero = build_zero_cst (TREE_TYPE (cancellable)); 8688 g = gimple_build_cond (NE_EXPR, cancellable, zero, 8689 lab6, lab5); 8690 gimple_seq_add_stmt (end, g); 8691 gimple_seq_add_stmt (end, gimple_build_label (lab5)); 8692 } 8693 if (omp_is_reference (decl) 8694 && !useless_type_conversion_p (TREE_TYPE (placeholder), 8695 TREE_TYPE (ref))) 8696 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref); 8697 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref); 8698 tree refv = create_tmp_var (TREE_TYPE (ref)); 8699 gimplify_assign (refv, ref, end); 8700 ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv); 8701 SET_DECL_VALUE_EXPR (placeholder, ref); 8702 DECL_HAS_VALUE_EXPR_P (placeholder) = 1; 8703 tree d = maybe_lookup_decl (decl, ctx); 8704 gcc_assert (d); 8705 if (DECL_HAS_VALUE_EXPR_P (d)) 8706 oldv = DECL_VALUE_EXPR (d); 8707 if (omp_is_reference (var)) 8708 { 8709 tree v = fold_convert (TREE_TYPE (d), 8710 build_fold_addr_expr (new_var)); 8711 SET_DECL_VALUE_EXPR (d, v); 8712 } 8713 else 8714 SET_DECL_VALUE_EXPR (d, new_var); 8715 DECL_HAS_VALUE_EXPR_P (d) = 1; 8716 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx); 8717 if (oldv) 8718 SET_DECL_VALUE_EXPR (d, oldv); 8719 else 8720 { 8721 SET_DECL_VALUE_EXPR (d, NULL_TREE); 8722 DECL_HAS_VALUE_EXPR_P (d) = 0; 8723 } 8724 gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)); 8725 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL; 8726 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION) 8727 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL; 8728 if (cancellable) 8729 gimple_seq_add_stmt (end, gimple_build_label (lab6)); 8730 tree x = lang_hooks.decls.omp_clause_dtor (c, new_var); 8731 if (x) 8732 { 8733 gimple_seq tseq = NULL; 8734 gimplify_stmt (&x, &tseq); 8735 gimple_seq_add_seq (end, tseq); 8736 } 8737 } 8738 else 8739 { 8740 tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var); 8741 ref = unshare_expr (ref); 8742 gimplify_assign (ref, x, end); 8743 } 8744 gimple_seq_add_stmt (end, gimple_build_label (lab4)); 8745 ++cnt; 8746 field = DECL_CHAIN (bfield); 8747 } 8748 } 8749 8750 if (code == OMP_TASKGROUP) 8751 { 8752 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER); 8753 g = gimple_build_call (t, 1, build_fold_addr_expr (avar)); 8754 gimple_seq_add_stmt (start, g); 8755 } 8756 else 8757 { 8758 tree c; 8759 if (code == OMP_FOR) 8760 c = gimple_omp_for_clauses (ctx->stmt); 8761 else if (code == OMP_SECTIONS) 8762 c = gimple_omp_sections_clauses (ctx->stmt); 8763 else 8764 c = gimple_omp_taskreg_clauses (ctx->stmt); 8765 c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_); 8766 t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)), 8767 build_fold_addr_expr (avar)); 8768 gimplify_assign (OMP_CLAUSE_DECL (c), t, start); 8769 } 8770 8771 gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz)); 8772 gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx, 8773 size_one_node)); 8774 g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2); 8775 gimple_seq_add_stmt (end, g); 8776 gimple_seq_add_stmt (end, 
gimple_build_label (lab2)); 8777 if (code == OMP_FOR || code == OMP_SECTIONS) 8778 { 8779 enum built_in_function bfn 8780 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER; 8781 t = builtin_decl_explicit (bfn); 8782 tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t))); 8783 tree arg; 8784 if (cancellable) 8785 { 8786 arg = create_tmp_var (c_bool_type); 8787 gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR, 8788 cancellable)); 8789 } 8790 else 8791 arg = build_int_cst (c_bool_type, 0); 8792 g = gimple_build_call (t, 1, arg); 8793 } 8794 else 8795 { 8796 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER); 8797 g = gimple_build_call (t, 1, build_fold_addr_expr (avar)); 8798 } 8799 gimple_seq_add_stmt (end, g); 8800 if (lab7) 8801 gimple_seq_add_stmt (end, gimple_build_label (lab7)); 8802 t = build_constructor (atype, NULL); 8803 TREE_THIS_VOLATILE (t) = 1; 8804 gimple_seq_add_stmt (end, gimple_build_assign (avar, t)); 8805 } 8806 8807 /* Expand code for an OpenMP taskgroup directive. */ 8808 8809 static void 8810 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx) 8811 { 8812 gimple *stmt = gsi_stmt (*gsi_p); 8813 gcall *x; 8814 gbind *bind; 8815 gimple_seq dseq = NULL; 8816 tree block = make_node (BLOCK); 8817 8818 bind = gimple_build_bind (NULL, NULL, block); 8819 gsi_replace (gsi_p, bind, true); 8820 gimple_bind_add_stmt (bind, stmt); 8821 8822 push_gimplify_context (); 8823 8824 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START), 8825 0); 8826 gimple_bind_add_stmt (bind, x); 8827 8828 lower_omp_task_reductions (ctx, OMP_TASKGROUP, 8829 gimple_omp_taskgroup_clauses (stmt), 8830 gimple_bind_body_ptr (bind), &dseq); 8831 8832 lower_omp (gimple_omp_body_ptr (stmt), ctx); 8833 gimple_bind_add_seq (bind, gimple_omp_body (stmt)); 8834 gimple_omp_set_body (stmt, NULL); 8835 8836 gimple_bind_add_stmt (bind, gimple_build_omp_return (true)); 8837 gimple_bind_add_seq (bind, dseq); 8838 8839 pop_gimplify_context (bind); 8840 8841 gimple_bind_append_vars (bind, ctx->block_vars); 8842 BLOCK_VARS (block) = ctx->block_vars; 8843 } 8844 8845 8846 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */ 8847 8848 static void 8849 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt, 8850 omp_context *ctx) 8851 { 8852 struct omp_for_data fd; 8853 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR) 8854 return; 8855 8856 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt); 8857 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len); 8858 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops); 8859 if (!fd.ordered) 8860 return; 8861 8862 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt); 8863 tree c = gimple_omp_ordered_clauses (ord_stmt); 8864 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND 8865 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK) 8866 { 8867 /* Merge depend clauses from multiple adjacent 8868 #pragma omp ordered depend(sink:...) constructs 8869 into one #pragma omp ordered depend(sink:...), so that 8870 we can optimize them together. 
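   E.g. the adjacent pair

     #pragma omp ordered depend(sink:i-1,j)
     #pragma omp ordered depend(sink:i,j-1)

   is merged into a single statement carrying both sink vectors.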
*/ 8871 gimple_stmt_iterator gsi = *gsi_p; 8872 gsi_next (&gsi); 8873 while (!gsi_end_p (gsi)) 8874 { 8875 gimple *stmt = gsi_stmt (gsi); 8876 if (is_gimple_debug (stmt) 8877 || gimple_code (stmt) == GIMPLE_NOP) 8878 { 8879 gsi_next (&gsi); 8880 continue; 8881 } 8882 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED) 8883 break; 8884 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt); 8885 c = gimple_omp_ordered_clauses (ord_stmt2); 8886 if (c == NULL_TREE 8887 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND 8888 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK) 8889 break; 8890 while (*list_p) 8891 list_p = &OMP_CLAUSE_CHAIN (*list_p); 8892 *list_p = c; 8893 gsi_remove (&gsi, true); 8894 } 8895 } 8896 8897 /* Canonicalize sink dependence clauses into one folded clause if 8898 possible. 8899 8900 The basic algorithm is to create a sink vector whose first 8901 element is the GCD of all the first elements, and whose remaining 8902 elements are the minimum of the subsequent columns. 8903 8904 We ignore dependence vectors whose first element is zero because 8905 such dependencies are known to be executed by the same thread. 8906 8907 We take into account the direction of the loop, so a minimum 8908 becomes a maximum if the loop is iterating forwards. We also 8909 ignore sink clauses where the loop direction is unknown, or where 8910 the offsets are clearly invalid because they are not a multiple 8911 of the loop increment. 8912 8913 For example: 8914 8915 #pragma omp for ordered(2) 8916 for (i=0; i < N; ++i) 8917 for (j=0; j < M; ++j) 8918 { 8919 #pragma omp ordered \ 8920 depend(sink:i-8,j-2) \ 8921 depend(sink:i,j-1) \ // Completely ignored because i+0. 8922 depend(sink:i-4,j-3) \ 8923 depend(sink:i-6,j-4) 8924 #pragma omp ordered depend(source) 8925 } 8926 8927 Folded clause is: 8928 8929 depend(sink:-gcd(8,4,6),-min(2,3,4)) 8930 -or- 8931 depend(sink:-2,-2) 8932 */ 8933 8934 /* FIXME: Computing GCD's where the first element is zero is 8935 non-trivial in the presence of collapsed loops. Do this later. */ 8936 if (fd.collapse > 1) 8937 return; 8938 8939 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1); 8940 8941 /* wide_int is not a POD so it must be default-constructed. */ 8942 for (unsigned i = 0; i != 2 * len - 1; ++i) 8943 new (static_cast<void*>(folded_deps + i)) wide_int (); 8944 8945 tree folded_dep = NULL_TREE; 8946 /* TRUE if the first dimension's offset is negative. */ 8947 bool neg_offset_p = false; 8948 8949 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt); 8950 unsigned int i; 8951 while ((c = *list_p) != NULL) 8952 { 8953 bool remove = false; 8954 8955 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND); 8956 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK) 8957 goto next_ordered_clause; 8958 8959 tree vec; 8960 for (vec = OMP_CLAUSE_DECL (c), i = 0; 8961 vec && TREE_CODE (vec) == TREE_LIST; 8962 vec = TREE_CHAIN (vec), ++i) 8963 { 8964 gcc_assert (i < len); 8965 8966 /* omp_extract_for_data has canonicalized the condition. */ 8967 gcc_assert (fd.loops[i].cond_code == LT_EXPR 8968 || fd.loops[i].cond_code == GT_EXPR); 8969 bool forward = fd.loops[i].cond_code == LT_EXPR; 8970 bool maybe_lexically_later = true; 8971 8972 /* While the committee makes up its mind, bail if we have any 8973 non-constant steps. 
*/ 8974 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
8975 goto lower_omp_ordered_ret;
8976
8977 tree itype = TREE_TYPE (TREE_VALUE (vec));
8978 if (POINTER_TYPE_P (itype))
8979 itype = sizetype;
8980 wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
8981 TYPE_PRECISION (itype),
8982 TYPE_SIGN (itype));
8983
8984 /* Ignore invalid offsets that are not multiples of the step. */
8985 if (!wi::multiple_of_p (wi::abs (offset),
8986 wi::abs (wi::to_wide (fd.loops[i].step)),
8987 UNSIGNED))
8988 {
8989 warning_at (OMP_CLAUSE_LOCATION (c), 0,
8990 "ignoring sink clause with offset that is not "
8991 "a multiple of the loop step");
8992 remove = true;
8993 goto next_ordered_clause;
8994 }
8995
8996 /* Calculate the first dimension. The first dimension of
8997 the folded dependency vector is the GCD of the first
8998 elements, while ignoring any first elements whose offset
8999 is 0. */
9000 if (i == 0)
9001 {
9002 /* Ignore dependence vectors whose first dimension is 0. */
9003 if (offset == 0)
9004 {
9005 remove = true;
9006 goto next_ordered_clause;
9007 }
9008 else
9009 {
9010 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
9011 {
9012 error_at (OMP_CLAUSE_LOCATION (c),
9013 "first offset must be in opposite direction "
9014 "of loop iterations");
9015 goto lower_omp_ordered_ret;
9016 }
9017 if (forward)
9018 offset = -offset;
9019 neg_offset_p = forward;
9020 /* Initialize the first time around. */
9021 if (folded_dep == NULL_TREE)
9022 {
9023 folded_dep = c;
9024 folded_deps[0] = offset;
9025 }
9026 else
9027 folded_deps[0] = wi::gcd (folded_deps[0],
9028 offset, UNSIGNED);
9029 }
9030 }
9031 /* Calculate minimum for the remaining dimensions. */
9032 else
9033 {
9034 folded_deps[len + i - 1] = offset;
9035 if (folded_dep == c)
9036 folded_deps[i] = offset;
9037 else if (maybe_lexically_later
9038 && !wi::eq_p (folded_deps[i], offset))
9039 {
9040 if (forward ^ wi::gts_p (folded_deps[i], offset))
9041 {
9042 unsigned int j;
9043 folded_dep = c;
9044 for (j = 1; j <= i; j++)
9045 folded_deps[j] = folded_deps[len + j - 1];
9046 }
9047 else
9048 maybe_lexically_later = false;
9049 }
9050 }
9051 }
9052 gcc_assert (i == len);
9053
9054 remove = true;
9055
9056 next_ordered_clause:
9057 if (remove)
9058 *list_p = OMP_CLAUSE_CHAIN (c);
9059 else
9060 list_p = &OMP_CLAUSE_CHAIN (c);
9061 }
9062
9063 if (folded_dep)
9064 {
9065 if (neg_offset_p)
9066 folded_deps[0] = -folded_deps[0];
9067
9068 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
9069 if (POINTER_TYPE_P (itype))
9070 itype = sizetype;
9071
9072 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
9073 = wide_int_to_tree (itype, folded_deps[0]);
9074 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
9075 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
9076 }
9077
9078 lower_omp_ordered_ret:
9079
9080 /* Ordered without clauses is #pragma omp ordered threads, while we
9081 want a nop instead if we remove all clauses. */
9082 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
9083 gsi_replace (gsi_p, gimple_build_nop (), true);
9084 }
9085
9086
9087 /* Expand code for an OpenMP ordered directive.
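   Without the 'simd' clause the body is bracketed roughly as

     GOMP_ordered_start ();
     BODY;
     GOMP_ordered_end ();

   while with 'simd' the IFN_GOMP_SIMD_ORDERED_{START,END} internal
   functions are used instead; the SIMT variant below additionally loops
   so that the lanes execute the body one at a time.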
 */
9088 
9089 static void
9090 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9091 {
9092   tree block;
9093   gimple *stmt = gsi_stmt (*gsi_p), *g;
9094   gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
9095   gcall *x;
9096   gbind *bind;
9097   bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9098                                OMP_CLAUSE_SIMD);
9099   /* FIXME: this should check for the presence of OMP_CLAUSE__SIMT_ on the
9100      enclosing loop.  */
9101   bool maybe_simt
9102     = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
9103   bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9104                                   OMP_CLAUSE_THREADS);
9105 
9106   if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9107                        OMP_CLAUSE_DEPEND))
9108     {
9109       /* FIXME: This needs to be moved to the expansion to verify various
9110          conditions only testable on a cfg with dominators computed, and also
9111          all the depend clauses to be merged still might need to be available
9112          for the runtime checks.  */
9113       if (0)
9114         lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
9115       return;
9116     }
9117 
9118   push_gimplify_context ();
9119 
9120   block = make_node (BLOCK);
9121   bind = gimple_build_bind (NULL, NULL, block);
9122   gsi_replace (gsi_p, bind, true);
9123   gimple_bind_add_stmt (bind, stmt);
9124 
9125   if (simd)
9126     {
9127       x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
9128                                       build_int_cst (NULL_TREE, threads));
9129       cfun->has_simduid_loops = true;
9130     }
9131   else
9132     x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
9133                            0);
9134   gimple_bind_add_stmt (bind, x);
9135 
9136   tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
9137   if (maybe_simt)
9138     {
9139       counter = create_tmp_var (integer_type_node);
9140       g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
9141       gimple_call_set_lhs (g, counter);
9142       gimple_bind_add_stmt (bind, g);
9143 
9144       body = create_artificial_label (UNKNOWN_LOCATION);
9145       test = create_artificial_label (UNKNOWN_LOCATION);
9146       gimple_bind_add_stmt (bind, gimple_build_label (body));
9147 
9148       tree simt_pred = create_tmp_var (integer_type_node);
9149       g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
9150       gimple_call_set_lhs (g, simt_pred);
9151       gimple_bind_add_stmt (bind, g);
9152 
9153       tree t = create_artificial_label (UNKNOWN_LOCATION);
9154       g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
9155       gimple_bind_add_stmt (bind, g);
9156 
9157       gimple_bind_add_stmt (bind, gimple_build_label (t));
9158     }
9159   lower_omp (gimple_omp_body_ptr (stmt), ctx);
9160   gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
9161   gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9162   gimple_omp_set_body (stmt, NULL);
9163 
9164   if (maybe_simt)
9165     {
9166       gimple_bind_add_stmt (bind, gimple_build_label (test));
9167       g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
9168       gimple_bind_add_stmt (bind, g);
9169 
9170       tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
9171       tree nonneg = create_tmp_var (integer_type_node);
9172       gimple_seq tseq = NULL;
9173       gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
9174       gimple_bind_add_seq (bind, tseq);
9175 
9176       g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
9177       gimple_call_set_lhs (g, nonneg);
9178       gimple_bind_add_stmt (bind, g);
9179 
9180       tree end = create_artificial_label (UNKNOWN_LOCATION);
9181       g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
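      /* A sketch of the run-time behavior of the SIMT sequence built above
         (illustration only):

             counter = GOMP_SIMT_LANE ();
           body:
             if (GOMP_SIMT_ORDERED_PRED (counter) == 0)
               <ordered body>;
           test:
             counter = counter - 1;
             if (GOMP_SIMT_VOTE_ANY (counter >= 0))
               goto body;

         so the lanes of a SIMT group appear to execute the body one at a
         time, in lane order.  */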
gimple_bind_add_stmt (bind, g); 9183 9184 gimple_bind_add_stmt (bind, gimple_build_label (end)); 9185 } 9186 if (simd) 9187 x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1, 9188 build_int_cst (NULL_TREE, threads)); 9189 else 9190 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END), 9191 0); 9192 gimple_bind_add_stmt (bind, x); 9193 9194 gimple_bind_add_stmt (bind, gimple_build_omp_return (true)); 9195 9196 pop_gimplify_context (bind); 9197 9198 gimple_bind_append_vars (bind, ctx->block_vars); 9199 BLOCK_VARS (block) = gimple_bind_vars (bind); 9200 } 9201 9202 9203 /* Expand code for an OpenMP scan directive and the structured block 9204 before the scan directive. */ 9205 9206 static void 9207 lower_omp_scan (gimple_stmt_iterator *gsi_p, omp_context *ctx) 9208 { 9209 gimple *stmt = gsi_stmt (*gsi_p); 9210 bool has_clauses 9211 = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)) != NULL; 9212 tree lane = NULL_TREE; 9213 gimple_seq before = NULL; 9214 omp_context *octx = ctx->outer; 9215 gcc_assert (octx); 9216 if (octx->scan_exclusive && !has_clauses) 9217 { 9218 gimple_stmt_iterator gsi2 = *gsi_p; 9219 gsi_next (&gsi2); 9220 gimple *stmt2 = gsi_stmt (gsi2); 9221 /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses 9222 with following GIMPLE_OMP_SCAN with clauses, so that input_phase, 9223 the one with exclusive clause(s), comes first. */ 9224 if (stmt2 9225 && gimple_code (stmt2) == GIMPLE_OMP_SCAN 9226 && gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt2)) != NULL) 9227 { 9228 gsi_remove (gsi_p, false); 9229 gsi_insert_after (gsi_p, stmt, GSI_SAME_STMT); 9230 ctx = maybe_lookup_ctx (stmt2); 9231 gcc_assert (ctx); 9232 lower_omp_scan (gsi_p, ctx); 9233 return; 9234 } 9235 } 9236 9237 bool input_phase = has_clauses ^ octx->scan_inclusive; 9238 bool is_simd = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR 9239 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_SIMD); 9240 bool is_for = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR 9241 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_FOR 9242 && !gimple_omp_for_combined_p (octx->stmt)); 9243 bool is_for_simd = is_simd && gimple_omp_for_combined_into_p (octx->stmt); 9244 if (is_for_simd && octx->for_simd_scan_phase) 9245 is_simd = false; 9246 if (is_simd) 9247 if (tree c = omp_find_clause (gimple_omp_for_clauses (octx->stmt), 9248 OMP_CLAUSE__SIMDUID_)) 9249 { 9250 tree uid = OMP_CLAUSE__SIMDUID__DECL (c); 9251 lane = create_tmp_var (unsigned_type_node); 9252 tree t = build_int_cst (integer_type_node, 9253 input_phase ? 1 9254 : octx->scan_inclusive ? 
2 : 3); 9255 gimple *g 9256 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 2, uid, t); 9257 gimple_call_set_lhs (g, lane); 9258 gimple_seq_add_stmt (&before, g); 9259 } 9260 9261 if (is_simd || is_for) 9262 { 9263 for (tree c = gimple_omp_for_clauses (octx->stmt); 9264 c; c = OMP_CLAUSE_CHAIN (c)) 9265 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION 9266 && OMP_CLAUSE_REDUCTION_INSCAN (c)) 9267 { 9268 location_t clause_loc = OMP_CLAUSE_LOCATION (c); 9269 tree var = OMP_CLAUSE_DECL (c); 9270 tree new_var = lookup_decl (var, octx); 9271 tree val = new_var; 9272 tree var2 = NULL_TREE; 9273 tree var3 = NULL_TREE; 9274 tree var4 = NULL_TREE; 9275 tree lane0 = NULL_TREE; 9276 tree new_vard = new_var; 9277 if (omp_is_reference (var)) 9278 { 9279 new_var = build_simple_mem_ref_loc (clause_loc, new_var); 9280 val = new_var; 9281 } 9282 if (DECL_HAS_VALUE_EXPR_P (new_vard)) 9283 { 9284 val = DECL_VALUE_EXPR (new_vard); 9285 if (new_vard != new_var) 9286 { 9287 gcc_assert (TREE_CODE (val) == ADDR_EXPR); 9288 val = TREE_OPERAND (val, 0); 9289 } 9290 if (TREE_CODE (val) == ARRAY_REF 9291 && VAR_P (TREE_OPERAND (val, 0))) 9292 { 9293 tree v = TREE_OPERAND (val, 0); 9294 if (lookup_attribute ("omp simd array", 9295 DECL_ATTRIBUTES (v))) 9296 { 9297 val = unshare_expr (val); 9298 lane0 = TREE_OPERAND (val, 1); 9299 TREE_OPERAND (val, 1) = lane; 9300 var2 = lookup_decl (v, octx); 9301 if (octx->scan_exclusive) 9302 var4 = lookup_decl (var2, octx); 9303 if (input_phase 9304 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)) 9305 var3 = maybe_lookup_decl (var4 ? var4 : var2, octx); 9306 if (!input_phase) 9307 { 9308 var2 = build4 (ARRAY_REF, TREE_TYPE (val), 9309 var2, lane, NULL_TREE, NULL_TREE); 9310 TREE_THIS_NOTRAP (var2) = 1; 9311 if (octx->scan_exclusive) 9312 { 9313 var4 = build4 (ARRAY_REF, TREE_TYPE (val), 9314 var4, lane, NULL_TREE, 9315 NULL_TREE); 9316 TREE_THIS_NOTRAP (var4) = 1; 9317 } 9318 } 9319 else 9320 var2 = val; 9321 } 9322 } 9323 gcc_assert (var2); 9324 } 9325 else 9326 { 9327 var2 = build_outer_var_ref (var, octx); 9328 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)) 9329 { 9330 var3 = maybe_lookup_decl (new_vard, octx); 9331 if (var3 == new_vard || var3 == NULL_TREE) 9332 var3 = NULL_TREE; 9333 else if (is_simd && octx->scan_exclusive && !input_phase) 9334 { 9335 var4 = maybe_lookup_decl (var3, octx); 9336 if (var4 == var3 || var4 == NULL_TREE) 9337 { 9338 if (TREE_ADDRESSABLE (TREE_TYPE (new_var))) 9339 { 9340 var4 = var3; 9341 var3 = NULL_TREE; 9342 } 9343 else 9344 var4 = NULL_TREE; 9345 } 9346 } 9347 } 9348 if (is_simd 9349 && octx->scan_exclusive 9350 && !input_phase 9351 && var4 == NULL_TREE) 9352 var4 = create_tmp_var (TREE_TYPE (val)); 9353 } 9354 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)) 9355 { 9356 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c); 9357 if (input_phase) 9358 { 9359 if (var3) 9360 { 9361 /* If we've added a separate identity element 9362 variable, copy it over into val. */ 9363 tree x = lang_hooks.decls.omp_clause_assign_op (c, val, 9364 var3); 9365 gimplify_and_add (x, &before); 9366 } 9367 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)) 9368 { 9369 /* Otherwise, assign to it the identity element. */ 9370 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c); 9371 if (is_for) 9372 tseq = copy_gimple_seq_and_replace_locals (tseq); 9373 tree ref = build_outer_var_ref (var, octx); 9374 tree x = (DECL_HAS_VALUE_EXPR_P (new_vard) 9375 ? 
DECL_VALUE_EXPR (new_vard) : NULL_TREE); 9376 if (x) 9377 { 9378 if (new_vard != new_var) 9379 val = build_fold_addr_expr_loc (clause_loc, val); 9380 SET_DECL_VALUE_EXPR (new_vard, val); 9381 } 9382 SET_DECL_VALUE_EXPR (placeholder, ref); 9383 DECL_HAS_VALUE_EXPR_P (placeholder) = 1; 9384 lower_omp (&tseq, octx); 9385 if (x) 9386 SET_DECL_VALUE_EXPR (new_vard, x); 9387 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE); 9388 DECL_HAS_VALUE_EXPR_P (placeholder) = 0; 9389 gimple_seq_add_seq (&before, tseq); 9390 if (is_simd) 9391 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL; 9392 } 9393 } 9394 else if (is_simd) 9395 { 9396 tree x; 9397 if (octx->scan_exclusive) 9398 { 9399 tree v4 = unshare_expr (var4); 9400 tree v2 = unshare_expr (var2); 9401 x = lang_hooks.decls.omp_clause_assign_op (c, v4, v2); 9402 gimplify_and_add (x, &before); 9403 } 9404 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c); 9405 x = (DECL_HAS_VALUE_EXPR_P (new_vard) 9406 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE); 9407 tree vexpr = val; 9408 if (x && new_vard != new_var) 9409 vexpr = build_fold_addr_expr_loc (clause_loc, val); 9410 if (x) 9411 SET_DECL_VALUE_EXPR (new_vard, vexpr); 9412 SET_DECL_VALUE_EXPR (placeholder, var2); 9413 DECL_HAS_VALUE_EXPR_P (placeholder) = 1; 9414 lower_omp (&tseq, octx); 9415 gimple_seq_add_seq (&before, tseq); 9416 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL; 9417 if (x) 9418 SET_DECL_VALUE_EXPR (new_vard, x); 9419 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE); 9420 DECL_HAS_VALUE_EXPR_P (placeholder) = 0; 9421 if (octx->scan_inclusive) 9422 { 9423 x = lang_hooks.decls.omp_clause_assign_op (c, val, 9424 var2); 9425 gimplify_and_add (x, &before); 9426 } 9427 else if (lane0 == NULL_TREE) 9428 { 9429 x = lang_hooks.decls.omp_clause_assign_op (c, val, 9430 var4); 9431 gimplify_and_add (x, &before); 9432 } 9433 } 9434 } 9435 else 9436 { 9437 if (input_phase) 9438 { 9439 /* input phase. Set val to initializer before 9440 the body. */ 9441 tree x = omp_reduction_init (c, TREE_TYPE (new_var)); 9442 gimplify_assign (val, x, &before); 9443 } 9444 else if (is_simd) 9445 { 9446 /* scan phase. */ 9447 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c); 9448 if (code == MINUS_EXPR) 9449 code = PLUS_EXPR; 9450 9451 tree x = build2 (code, TREE_TYPE (var2), 9452 unshare_expr (var2), unshare_expr (val)); 9453 if (octx->scan_inclusive) 9454 { 9455 gimplify_assign (unshare_expr (var2), x, &before); 9456 gimplify_assign (val, var2, &before); 9457 } 9458 else 9459 { 9460 gimplify_assign (unshare_expr (var4), 9461 unshare_expr (var2), &before); 9462 gimplify_assign (var2, x, &before); 9463 if (lane0 == NULL_TREE) 9464 gimplify_assign (val, var4, &before); 9465 } 9466 } 9467 } 9468 if (octx->scan_exclusive && !input_phase && lane0) 9469 { 9470 tree vexpr = unshare_expr (var4); 9471 TREE_OPERAND (vexpr, 1) = lane0; 9472 if (new_vard != new_var) 9473 vexpr = build_fold_addr_expr_loc (clause_loc, vexpr); 9474 SET_DECL_VALUE_EXPR (new_vard, vexpr); 9475 } 9476 } 9477 } 9478 if (is_simd && !is_for_simd) 9479 { 9480 gsi_insert_seq_after (gsi_p, gimple_omp_body (stmt), GSI_SAME_STMT); 9481 gsi_insert_seq_after (gsi_p, before, GSI_SAME_STMT); 9482 gsi_replace (gsi_p, gimple_build_nop (), true); 9483 return; 9484 } 9485 lower_omp (gimple_omp_body_ptr (stmt), octx); 9486 if (before) 9487 { 9488 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (stmt)); 9489 gsi_insert_seq_before (&gsi, before, GSI_SAME_STMT); 9490 } 9491 } 9492 9493 9494 /* Gimplify a GIMPLE_OMP_CRITICAL statement. 
This is a relatively simple
9495    substitution of a couple of function calls.  But in the NAMED case, it
9496    requires that the languages coordinate a symbol name.  It is therefore
9497    best put here in common code.  */
9498 
9499 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
9500 
9501 static void
9502 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9503 {
9504   tree block;
9505   tree name, lock, unlock;
9506   gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
9507   gbind *bind;
9508   location_t loc = gimple_location (stmt);
9509   gimple_seq tbody;
9510 
9511   name = gimple_omp_critical_name (stmt);
9512   if (name)
9513     {
9514       tree decl;
9515 
9516       if (!critical_name_mutexes)
9517         critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
9518 
9519       tree *n = critical_name_mutexes->get (name);
9520       if (n == NULL)
9521         {
9522           char *new_str;
9523 
9524           decl = create_tmp_var_raw (ptr_type_node);
9525 
9526           new_str = ACONCAT ((".gomp_critical_user_",
9527                               IDENTIFIER_POINTER (name), NULL));
9528           DECL_NAME (decl) = get_identifier (new_str);
9529           TREE_PUBLIC (decl) = 1;
9530           TREE_STATIC (decl) = 1;
9531           DECL_COMMON (decl) = 1;
9532           DECL_ARTIFICIAL (decl) = 1;
9533           DECL_IGNORED_P (decl) = 1;
9534 
9535           varpool_node::finalize_decl (decl);
9536 
9537           critical_name_mutexes->put (name, decl);
9538         }
9539       else
9540         decl = *n;
9541 
9542       /* If '#pragma omp critical' is inside an offloaded region or
9543          inside a function marked as offloadable, the symbol must be
9544          marked as offloadable too.  */
9545       omp_context *octx;
9546       if (cgraph_node::get (current_function_decl)->offloadable)
9547         varpool_node::get_create (decl)->offloadable = 1;
9548       else
9549         for (octx = ctx->outer; octx; octx = octx->outer)
9550           if (is_gimple_omp_offloaded (octx->stmt))
9551             {
9552               varpool_node::get_create (decl)->offloadable = 1;
9553               break;
9554             }
9555 
9556       lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
9557       lock = build_call_expr_loc (loc, lock, 1,
9558                                   build_fold_addr_expr_loc (loc, decl));
9559 
9560       unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
9561       unlock = build_call_expr_loc (loc, unlock, 1,
9562                                     build_fold_addr_expr_loc (loc, decl));
9563     }
9564   else
9565     {
9566       lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
9567       lock = build_call_expr_loc (loc, lock, 0);
9568 
9569       unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
9570       unlock = build_call_expr_loc (loc, unlock, 0);
9571     }
9572 
9573   push_gimplify_context ();
9574 
9575   block = make_node (BLOCK);
9576   bind = gimple_build_bind (NULL, NULL, block);
9577   gsi_replace (gsi_p, bind, true);
9578   gimple_bind_add_stmt (bind, stmt);
9579 
9580   tbody = gimple_bind_body (bind);
9581   gimplify_and_add (lock, &tbody);
9582   gimple_bind_set_body (bind, tbody);
9583 
9584   lower_omp (gimple_omp_body_ptr (stmt), ctx);
9585   gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
9586   gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9587   gimple_omp_set_body (stmt, NULL);
9588 
9589   tbody = gimple_bind_body (bind);
9590   gimplify_and_add (unlock, &tbody);
9591   gimple_bind_set_body (bind, tbody);
9592 
9593   gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9594 
9595   pop_gimplify_context (bind);
9596   gimple_bind_append_vars (bind, ctx->block_vars);
9597   BLOCK_VARS (block) = gimple_bind_vars (bind);
9598 }
9599 
9600 /* A subroutine of lower_omp_for.  Generate code to emit the predicate
9601    for a lastprivate clause.
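   (As a concrete instance, illustrative only: for
      for (V = 0; V < N2; V++)
   the guard ends up being V >= N2, or V == N2 when the step is known to
   be +-1 as described below, so only the thread that ran the last
   iteration executes the lastprivate assignments.)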
Given a loop control predicate of (V 9602 cond N2), we gate the clause on (!(V cond N2)). The lowered form 9603 is appended to *DLIST, iterator initialization is appended to 9604 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs 9605 to be emitted in a critical section. */ 9606 9607 static void 9608 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p, 9609 gimple_seq *dlist, gimple_seq *clist, 9610 struct omp_context *ctx) 9611 { 9612 tree clauses, cond, vinit; 9613 enum tree_code cond_code; 9614 gimple_seq stmts; 9615 9616 cond_code = fd->loop.cond_code; 9617 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR; 9618 9619 /* When possible, use a strict equality expression. This can let VRP 9620 type optimizations deduce the value and remove a copy. */ 9621 if (tree_fits_shwi_p (fd->loop.step)) 9622 { 9623 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step); 9624 if (step == 1 || step == -1) 9625 cond_code = EQ_EXPR; 9626 } 9627 9628 if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP 9629 || gimple_omp_for_grid_phony (fd->for_stmt)) 9630 cond = omp_grid_lastprivate_predicate (fd); 9631 else 9632 { 9633 tree n2 = fd->loop.n2; 9634 if (fd->collapse > 1 9635 && TREE_CODE (n2) != INTEGER_CST 9636 && gimple_omp_for_combined_into_p (fd->for_stmt)) 9637 { 9638 struct omp_context *taskreg_ctx = NULL; 9639 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR) 9640 { 9641 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt); 9642 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR 9643 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE) 9644 { 9645 if (gimple_omp_for_combined_into_p (gfor)) 9646 { 9647 gcc_assert (ctx->outer->outer 9648 && is_parallel_ctx (ctx->outer->outer)); 9649 taskreg_ctx = ctx->outer->outer; 9650 } 9651 else 9652 { 9653 struct omp_for_data outer_fd; 9654 omp_extract_for_data (gfor, &outer_fd, NULL); 9655 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2); 9656 } 9657 } 9658 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP) 9659 taskreg_ctx = ctx->outer->outer; 9660 } 9661 else if (is_taskreg_ctx (ctx->outer)) 9662 taskreg_ctx = ctx->outer; 9663 if (taskreg_ctx) 9664 { 9665 int i; 9666 tree taskreg_clauses 9667 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt); 9668 tree innerc = omp_find_clause (taskreg_clauses, 9669 OMP_CLAUSE__LOOPTEMP_); 9670 gcc_assert (innerc); 9671 for (i = 0; i < fd->collapse; i++) 9672 { 9673 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc), 9674 OMP_CLAUSE__LOOPTEMP_); 9675 gcc_assert (innerc); 9676 } 9677 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc), 9678 OMP_CLAUSE__LOOPTEMP_); 9679 if (innerc) 9680 n2 = fold_convert (TREE_TYPE (n2), 9681 lookup_decl (OMP_CLAUSE_DECL (innerc), 9682 taskreg_ctx)); 9683 } 9684 } 9685 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2); 9686 } 9687 9688 clauses = gimple_omp_for_clauses (fd->for_stmt); 9689 stmts = NULL; 9690 lower_lastprivate_clauses (clauses, cond, body_p, &stmts, clist, ctx); 9691 if (!gimple_seq_empty_p (stmts)) 9692 { 9693 gimple_seq_add_seq (&stmts, *dlist); 9694 *dlist = stmts; 9695 9696 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */ 9697 vinit = fd->loop.n1; 9698 if (cond_code == EQ_EXPR 9699 && tree_fits_shwi_p (fd->loop.n2) 9700 && ! 
integer_zerop (fd->loop.n2))
9701         vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
9702       else
9703         vinit = unshare_expr (vinit);
9704 
9705       /* Initialize the iterator variable, so that threads that don't execute
9706          any iterations don't execute the lastprivate clauses by accident.  */
9707       gimplify_assign (fd->loop.v, vinit, body_p);
9708     }
9709 }
9710 
9711 /* Callback for walk_gimple_seq.  Find a #pragma omp scan statement.  */
9712 
9713 static tree
9714 omp_find_scan (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9715                struct walk_stmt_info *wi)
9716 {
9717   gimple *stmt = gsi_stmt (*gsi_p);
9718 
9719   *handled_ops_p = true;
9720   switch (gimple_code (stmt))
9721     {
9722     WALK_SUBSTMTS;
9723 
9724     case GIMPLE_OMP_FOR:
9725       if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD
9726           && gimple_omp_for_combined_into_p (stmt))
9727         *handled_ops_p = false;
9728       break;
9729 
9730     case GIMPLE_OMP_SCAN:
9731       *(gimple_stmt_iterator *) (wi->info) = *gsi_p;
9732       return integer_zero_node;
9733     default:
9734       break;
9735     }
9736   return NULL;
9737 }
9738 
9739 /* Helper function for lower_omp_for; add transformations for a worksharing
9740    loop with scan directives inside of it.
9741    For a worksharing loop not combined with simd, transform:
9742    #pragma omp for reduction(inscan,+:r) private(i)
9743    for (i = 0; i < n; i = i + 1)
9744      {
9745        {
9746          update (r);
9747        }
9748        #pragma omp scan inclusive(r)
9749        {
9750          use (r);
9751        }
9752      }
9753 
9754    into two worksharing loops + code to merge results:
9755 
9756    num_threads = omp_get_num_threads ();
9757    thread_num = omp_get_thread_num ();
9758    if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
9759    <D.2099>:
9760    var2 = r;
9761    goto <D.2101>;
9762    <D.2100>:
9763    // For UDRs this is UDR init, or if ctors are needed, copy from
9764    // var3 that has been constructed to contain the neutral element.
9765    var2 = 0;
9766    <D.2101>:
9767    ivar = 0;
9768    // The _scantemp_ clauses will arrange for rpriva to be initialized to
9769    // a shared array with num_threads elements and rprivb to a local array
9770    // with a number of elements equal to the number of (contiguous) iterations
9771    // the current thread will perform.  The controlb and controlp variables
9772    // are temporaries to handle deallocation of rprivb at the end of the
9773    // second GOMP_FOR.
9774    #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
9775    _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
9776    for (i = 0; i < n; i = i + 1)
9777      {
9778        {
9779          // For UDRs this is UDR init or copy from var3.
9780          r = 0;
9781          // This is the input phase from user code.
9782          update (r);
9783        }
9784        {
9785          // For UDRs this is UDR merge.
9786          var2 = var2 + r;
9787          // Rather than handing it over to the user, save it to the
9788          // thread's local array.
9789          rprivb[ivar] = var2;
9790          // For exclusive scan, the above two statements are swapped.
9791          ivar = ivar + 1;
9792        }
9793      }
9794    // And remember this thread's final value in the shared
9795    // rpriva array.
9796    rpriva[(sizetype) thread_num] = var2;
9797    // If there is more than one thread, compute the inclusive parallel scan
9798    // of the rpriva array using a work-efficient prefix sum.
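   // (Illustration, not emitted code: with 4 threads holding partial
   // sums {p0, p1, p2, p3} in rpriva, the up-sweep below first adds
   // rpriva[0] into rpriva[1] and rpriva[2] into rpriva[3] (k == 1),
   // then rpriva[1] into rpriva[3] (k == 2); the down-sweep then adds
   // rpriva[1] into rpriva[2] (k == 1), leaving rpriva[t] equal to
   // p0 + ... + pt for each thread t.)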
9799    if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
9800    <D.2102>:
9801    GOMP_barrier ();
9802    down = 0;
9803    k = 1;
9804    num_threadsu = (unsigned int) num_threads;
9805    thread_nump1 = (unsigned int) thread_num + 1;
9806    <D.2108>:
9807    twok = k << 1;
9808    if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
9809    <D.2110>:
9810    down = 4294967295;
9811    k = k >> 1;
9812    if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
9813    <D.2112>:
9814    k = k >> 1;
9815    <D.2111>:
9816    twok = k << 1;
9817    cplx = .MUL_OVERFLOW (thread_nump1, twok);
9818    mul = REALPART_EXPR <cplx>;
9819    ovf = IMAGPART_EXPR <cplx>;
9820    if (ovf == 0) goto <D.2116>; else goto <D.2117>;
9821    <D.2116>:
9822    andv = k & down;
9823    andvm1 = andv + 4294967295;
9824    l = mul + andvm1;
9825    if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
9826    <D.2120>:
9827    // For UDRs this is UDR merge, performed using the var2 variable as a
9828    // temporary, i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
9829    rpriva[l] = rpriva[l - k] + rpriva[l];
9830    <D.2117>:
9831    if (down == 0) goto <D.2121>; else goto <D.2122>;
9832    <D.2121>:
9833    k = k << 1;
9834    goto <D.2123>;
9835    <D.2122>:
9836    k = k >> 1;
9837    <D.2123>:
9838    GOMP_barrier ();
9839    if (k != 0) goto <D.2108>; else goto <D.2103>;
9840    <D.2103>:
9841    if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
9842    <D.2124>:
9843    // For UDRs this is UDR init or copy from var3.
9844    var2 = 0;
9845    goto <D.2126>;
9846    <D.2125>:
9847    var2 = rpriva[thread_num - 1];
9848    <D.2126>:
9849    ivar = 0;
9850    #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
9851    reduction(inscan,+:r) private(i)
9852    for (i = 0; i < n; i = i + 1)
9853      {
9854        {
9855          // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
9856          r = var2 + rprivb[ivar];
9857        }
9858        {
9859          // This is the scan phase from user code.
9860          use (r);
9861          // Plus a bump of the iterator.
9862 ivar = ivar + 1; 9863 } 9864 } */ 9865 9866 static void 9867 lower_omp_for_scan (gimple_seq *body_p, gimple_seq *dlist, gomp_for *stmt, 9868 struct omp_for_data *fd, omp_context *ctx) 9869 { 9870 bool is_for_simd = gimple_omp_for_combined_p (stmt); 9871 gcc_assert (ctx->scan_inclusive || ctx->scan_exclusive); 9872 9873 gimple_seq body = gimple_omp_body (stmt); 9874 gimple_stmt_iterator input1_gsi = gsi_none (); 9875 struct walk_stmt_info wi; 9876 memset (&wi, 0, sizeof (wi)); 9877 wi.val_only = true; 9878 wi.info = (void *) &input1_gsi; 9879 walk_gimple_seq_mod (&body, omp_find_scan, NULL, &wi); 9880 gcc_assert (!gsi_end_p (input1_gsi)); 9881 9882 gimple *input_stmt1 = gsi_stmt (input1_gsi); 9883 gimple_stmt_iterator gsi = input1_gsi; 9884 gsi_next (&gsi); 9885 gimple_stmt_iterator scan1_gsi = gsi; 9886 gimple *scan_stmt1 = gsi_stmt (gsi); 9887 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN); 9888 9889 gimple_seq input_body = gimple_omp_body (input_stmt1); 9890 gimple_seq scan_body = gimple_omp_body (scan_stmt1); 9891 gimple_omp_set_body (input_stmt1, NULL); 9892 gimple_omp_set_body (scan_stmt1, NULL); 9893 gimple_omp_set_body (stmt, NULL); 9894 9895 gomp_for *new_stmt = as_a <gomp_for *> (gimple_copy (stmt)); 9896 gimple_seq new_body = copy_gimple_seq_and_replace_locals (body); 9897 gimple_omp_set_body (stmt, body); 9898 gimple_omp_set_body (input_stmt1, input_body); 9899 9900 gimple_stmt_iterator input2_gsi = gsi_none (); 9901 memset (&wi, 0, sizeof (wi)); 9902 wi.val_only = true; 9903 wi.info = (void *) &input2_gsi; 9904 walk_gimple_seq_mod (&new_body, omp_find_scan, NULL, &wi); 9905 gcc_assert (!gsi_end_p (input2_gsi)); 9906 9907 gimple *input_stmt2 = gsi_stmt (input2_gsi); 9908 gsi = input2_gsi; 9909 gsi_next (&gsi); 9910 gimple_stmt_iterator scan2_gsi = gsi; 9911 gimple *scan_stmt2 = gsi_stmt (gsi); 9912 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN); 9913 gimple_omp_set_body (scan_stmt2, scan_body); 9914 9915 gimple_stmt_iterator input3_gsi = gsi_none (); 9916 gimple_stmt_iterator scan3_gsi = gsi_none (); 9917 gimple_stmt_iterator input4_gsi = gsi_none (); 9918 gimple_stmt_iterator scan4_gsi = gsi_none (); 9919 gimple *input_stmt3 = NULL, *scan_stmt3 = NULL; 9920 gimple *input_stmt4 = NULL, *scan_stmt4 = NULL; 9921 omp_context *input_simd_ctx = NULL, *scan_simd_ctx = NULL; 9922 if (is_for_simd) 9923 { 9924 memset (&wi, 0, sizeof (wi)); 9925 wi.val_only = true; 9926 wi.info = (void *) &input3_gsi; 9927 walk_gimple_seq_mod (&input_body, omp_find_scan, NULL, &wi); 9928 gcc_assert (!gsi_end_p (input3_gsi)); 9929 9930 input_stmt3 = gsi_stmt (input3_gsi); 9931 gsi = input3_gsi; 9932 gsi_next (&gsi); 9933 scan3_gsi = gsi; 9934 scan_stmt3 = gsi_stmt (gsi); 9935 gcc_assert (scan_stmt3 && gimple_code (scan_stmt3) == GIMPLE_OMP_SCAN); 9936 9937 memset (&wi, 0, sizeof (wi)); 9938 wi.val_only = true; 9939 wi.info = (void *) &input4_gsi; 9940 walk_gimple_seq_mod (&scan_body, omp_find_scan, NULL, &wi); 9941 gcc_assert (!gsi_end_p (input4_gsi)); 9942 9943 input_stmt4 = gsi_stmt (input4_gsi); 9944 gsi = input4_gsi; 9945 gsi_next (&gsi); 9946 scan4_gsi = gsi; 9947 scan_stmt4 = gsi_stmt (gsi); 9948 gcc_assert (scan_stmt4 && gimple_code (scan_stmt4) == GIMPLE_OMP_SCAN); 9949 9950 input_simd_ctx = maybe_lookup_ctx (input_stmt3)->outer; 9951 scan_simd_ctx = maybe_lookup_ctx (input_stmt4)->outer; 9952 } 9953 9954 tree num_threads = create_tmp_var (integer_type_node); 9955 tree thread_num = create_tmp_var (integer_type_node); 9956 tree nthreads_decl = 
builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS); 9957 tree threadnum_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM); 9958 gimple *g = gimple_build_call (nthreads_decl, 0); 9959 gimple_call_set_lhs (g, num_threads); 9960 gimple_seq_add_stmt (body_p, g); 9961 g = gimple_build_call (threadnum_decl, 0); 9962 gimple_call_set_lhs (g, thread_num); 9963 gimple_seq_add_stmt (body_p, g); 9964 9965 tree ivar = create_tmp_var (sizetype); 9966 tree new_clauses1 = NULL_TREE, new_clauses2 = NULL_TREE; 9967 tree *cp1 = &new_clauses1, *cp2 = &new_clauses2; 9968 tree k = create_tmp_var (unsigned_type_node); 9969 tree l = create_tmp_var (unsigned_type_node); 9970 9971 gimple_seq clist = NULL, mdlist = NULL; 9972 gimple_seq thr01_list = NULL, thrn1_list = NULL; 9973 gimple_seq thr02_list = NULL, thrn2_list = NULL; 9974 gimple_seq scan1_list = NULL, input2_list = NULL; 9975 gimple_seq last_list = NULL, reduc_list = NULL; 9976 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c)) 9977 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION 9978 && OMP_CLAUSE_REDUCTION_INSCAN (c)) 9979 { 9980 location_t clause_loc = OMP_CLAUSE_LOCATION (c); 9981 tree var = OMP_CLAUSE_DECL (c); 9982 tree new_var = lookup_decl (var, ctx); 9983 tree var3 = NULL_TREE; 9984 tree new_vard = new_var; 9985 if (omp_is_reference (var)) 9986 new_var = build_simple_mem_ref_loc (clause_loc, new_var); 9987 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)) 9988 { 9989 var3 = maybe_lookup_decl (new_vard, ctx); 9990 if (var3 == new_vard) 9991 var3 = NULL_TREE; 9992 } 9993 9994 tree ptype = build_pointer_type (TREE_TYPE (new_var)); 9995 tree rpriva = create_tmp_var (ptype); 9996 tree nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_); 9997 OMP_CLAUSE_DECL (nc) = rpriva; 9998 *cp1 = nc; 9999 cp1 = &OMP_CLAUSE_CHAIN (nc); 10000 10001 tree rprivb = create_tmp_var (ptype); 10002 nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_); 10003 OMP_CLAUSE_DECL (nc) = rprivb; 10004 OMP_CLAUSE__SCANTEMP__ALLOC (nc) = 1; 10005 *cp1 = nc; 10006 cp1 = &OMP_CLAUSE_CHAIN (nc); 10007 10008 tree var2 = create_tmp_var_raw (TREE_TYPE (new_var)); 10009 if (new_vard != new_var) 10010 TREE_ADDRESSABLE (var2) = 1; 10011 gimple_add_tmp_var (var2); 10012 10013 tree x = fold_convert_loc (clause_loc, sizetype, thread_num); 10014 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x, 10015 TYPE_SIZE_UNIT (TREE_TYPE (ptype))); 10016 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x); 10017 tree rpriva_ref = build_simple_mem_ref_loc (clause_loc, x); 10018 10019 x = fold_build2_loc (clause_loc, PLUS_EXPR, integer_type_node, 10020 thread_num, integer_minus_one_node); 10021 x = fold_convert_loc (clause_loc, sizetype, x); 10022 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x, 10023 TYPE_SIZE_UNIT (TREE_TYPE (ptype))); 10024 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x); 10025 tree rprivam1_ref = build_simple_mem_ref_loc (clause_loc, x); 10026 10027 x = fold_convert_loc (clause_loc, sizetype, l); 10028 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x, 10029 TYPE_SIZE_UNIT (TREE_TYPE (ptype))); 10030 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x); 10031 tree rprival_ref = build_simple_mem_ref_loc (clause_loc, x); 10032 10033 x = fold_build2_loc (clause_loc, MINUS_EXPR, unsigned_type_node, l, k); 10034 x = fold_convert_loc (clause_loc, sizetype, x); 10035 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x, 10036 TYPE_SIZE_UNIT (TREE_TYPE (ptype))); 10037 x = fold_build2 
(POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x); 10038 tree rprivalmk_ref = build_simple_mem_ref_loc (clause_loc, x); 10039 10040 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, ivar, 10041 TYPE_SIZE_UNIT (TREE_TYPE (ptype))); 10042 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rprivb), rprivb, x); 10043 tree rprivb_ref = build_simple_mem_ref_loc (clause_loc, x); 10044 10045 tree var4 = is_for_simd ? new_var : var2; 10046 tree var5 = NULL_TREE, var6 = NULL_TREE; 10047 if (is_for_simd) 10048 { 10049 var5 = lookup_decl (var, input_simd_ctx); 10050 var6 = lookup_decl (var, scan_simd_ctx); 10051 if (new_vard != new_var) 10052 { 10053 var5 = build_simple_mem_ref_loc (clause_loc, var5); 10054 var6 = build_simple_mem_ref_loc (clause_loc, var6); 10055 } 10056 } 10057 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)) 10058 { 10059 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c); 10060 tree val = var2; 10061 10062 x = lang_hooks.decls.omp_clause_default_ctor 10063 (c, var2, build_outer_var_ref (var, ctx)); 10064 if (x) 10065 gimplify_and_add (x, &clist); 10066 10067 x = build_outer_var_ref (var, ctx); 10068 x = lang_hooks.decls.omp_clause_assign_op (c, unshare_expr (var4), 10069 x); 10070 gimplify_and_add (x, &thr01_list); 10071 10072 tree y = (DECL_HAS_VALUE_EXPR_P (new_vard) 10073 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE); 10074 if (var3) 10075 { 10076 x = unshare_expr (var4); 10077 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3); 10078 gimplify_and_add (x, &thrn1_list); 10079 x = unshare_expr (var4); 10080 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3); 10081 gimplify_and_add (x, &thr02_list); 10082 } 10083 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)) 10084 { 10085 /* Otherwise, assign to it the identity element. */ 10086 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c); 10087 tseq = copy_gimple_seq_and_replace_locals (tseq); 10088 if (!is_for_simd) 10089 { 10090 if (new_vard != new_var) 10091 val = build_fold_addr_expr_loc (clause_loc, val); 10092 SET_DECL_VALUE_EXPR (new_vard, val); 10093 DECL_HAS_VALUE_EXPR_P (new_vard) = 1; 10094 } 10095 SET_DECL_VALUE_EXPR (placeholder, error_mark_node); 10096 DECL_HAS_VALUE_EXPR_P (placeholder) = 1; 10097 lower_omp (&tseq, ctx); 10098 gimple_seq_add_seq (&thrn1_list, tseq); 10099 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c); 10100 lower_omp (&tseq, ctx); 10101 gimple_seq_add_seq (&thr02_list, tseq); 10102 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE); 10103 DECL_HAS_VALUE_EXPR_P (placeholder) = 0; 10104 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL; 10105 if (y) 10106 SET_DECL_VALUE_EXPR (new_vard, y); 10107 else 10108 { 10109 DECL_HAS_VALUE_EXPR_P (new_vard) = 0; 10110 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE); 10111 } 10112 } 10113 10114 x = unshare_expr (var4); 10115 x = lang_hooks.decls.omp_clause_assign_op (c, x, rprivam1_ref); 10116 gimplify_and_add (x, &thrn2_list); 10117 10118 if (is_for_simd) 10119 { 10120 x = unshare_expr (rprivb_ref); 10121 x = lang_hooks.decls.omp_clause_assign_op (c, x, var5); 10122 gimplify_and_add (x, &scan1_list); 10123 } 10124 else 10125 { 10126 if (ctx->scan_exclusive) 10127 { 10128 x = unshare_expr (rprivb_ref); 10129 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2); 10130 gimplify_and_add (x, &scan1_list); 10131 } 10132 10133 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c); 10134 tseq = copy_gimple_seq_and_replace_locals (tseq); 10135 SET_DECL_VALUE_EXPR (placeholder, var2); 10136 DECL_HAS_VALUE_EXPR_P (placeholder) = 1; 10137 lower_omp (&tseq, ctx); 10138 gimple_seq_add_seq 
(&scan1_list, tseq); 10139 10140 if (ctx->scan_inclusive) 10141 { 10142 x = unshare_expr (rprivb_ref); 10143 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2); 10144 gimplify_and_add (x, &scan1_list); 10145 } 10146 } 10147 10148 x = unshare_expr (rpriva_ref); 10149 x = lang_hooks.decls.omp_clause_assign_op (c, x, 10150 unshare_expr (var4)); 10151 gimplify_and_add (x, &mdlist); 10152 10153 x = unshare_expr (is_for_simd ? var6 : new_var); 10154 x = lang_hooks.decls.omp_clause_assign_op (c, x, var4); 10155 gimplify_and_add (x, &input2_list); 10156 10157 val = rprivb_ref; 10158 if (new_vard != new_var) 10159 val = build_fold_addr_expr_loc (clause_loc, val); 10160 10161 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c); 10162 tseq = copy_gimple_seq_and_replace_locals (tseq); 10163 SET_DECL_VALUE_EXPR (new_vard, val); 10164 DECL_HAS_VALUE_EXPR_P (new_vard) = 1; 10165 if (is_for_simd) 10166 { 10167 SET_DECL_VALUE_EXPR (placeholder, var6); 10168 DECL_HAS_VALUE_EXPR_P (placeholder) = 1; 10169 } 10170 else 10171 DECL_HAS_VALUE_EXPR_P (placeholder) = 0; 10172 lower_omp (&tseq, ctx); 10173 if (y) 10174 SET_DECL_VALUE_EXPR (new_vard, y); 10175 else 10176 { 10177 DECL_HAS_VALUE_EXPR_P (new_vard) = 0; 10178 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE); 10179 } 10180 if (!is_for_simd) 10181 { 10182 SET_DECL_VALUE_EXPR (placeholder, new_var); 10183 DECL_HAS_VALUE_EXPR_P (placeholder) = 1; 10184 lower_omp (&tseq, ctx); 10185 } 10186 gimple_seq_add_seq (&input2_list, tseq); 10187 10188 x = build_outer_var_ref (var, ctx); 10189 x = lang_hooks.decls.omp_clause_assign_op (c, x, rpriva_ref); 10190 gimplify_and_add (x, &last_list); 10191 10192 x = lang_hooks.decls.omp_clause_assign_op (c, var2, rprivalmk_ref); 10193 gimplify_and_add (x, &reduc_list); 10194 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c); 10195 tseq = copy_gimple_seq_and_replace_locals (tseq); 10196 val = rprival_ref; 10197 if (new_vard != new_var) 10198 val = build_fold_addr_expr_loc (clause_loc, val); 10199 SET_DECL_VALUE_EXPR (new_vard, val); 10200 DECL_HAS_VALUE_EXPR_P (new_vard) = 1; 10201 SET_DECL_VALUE_EXPR (placeholder, var2); 10202 lower_omp (&tseq, ctx); 10203 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL; 10204 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE); 10205 DECL_HAS_VALUE_EXPR_P (placeholder) = 0; 10206 if (y) 10207 SET_DECL_VALUE_EXPR (new_vard, y); 10208 else 10209 { 10210 DECL_HAS_VALUE_EXPR_P (new_vard) = 0; 10211 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE); 10212 } 10213 gimple_seq_add_seq (&reduc_list, tseq); 10214 x = lang_hooks.decls.omp_clause_assign_op (c, rprival_ref, var2); 10215 gimplify_and_add (x, &reduc_list); 10216 10217 x = lang_hooks.decls.omp_clause_dtor (c, var2); 10218 if (x) 10219 gimplify_and_add (x, dlist); 10220 } 10221 else 10222 { 10223 x = build_outer_var_ref (var, ctx); 10224 gimplify_assign (unshare_expr (var4), x, &thr01_list); 10225 10226 x = omp_reduction_init (c, TREE_TYPE (new_var)); 10227 gimplify_assign (unshare_expr (var4), unshare_expr (x), 10228 &thrn1_list); 10229 gimplify_assign (unshare_expr (var4), x, &thr02_list); 10230 10231 gimplify_assign (unshare_expr (var4), rprivam1_ref, &thrn2_list); 10232 10233 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c); 10234 if (code == MINUS_EXPR) 10235 code = PLUS_EXPR; 10236 10237 if (is_for_simd) 10238 gimplify_assign (unshare_expr (rprivb_ref), var5, &scan1_list); 10239 else 10240 { 10241 if (ctx->scan_exclusive) 10242 gimplify_assign (unshare_expr (rprivb_ref), var2, 10243 &scan1_list); 10244 x = build2 (code, TREE_TYPE (new_var), var2, 
new_var); 10245 gimplify_assign (var2, x, &scan1_list); 10246 if (ctx->scan_inclusive) 10247 gimplify_assign (unshare_expr (rprivb_ref), var2, 10248 &scan1_list); 10249 } 10250 10251 gimplify_assign (unshare_expr (rpriva_ref), unshare_expr (var4), 10252 &mdlist); 10253 10254 x = build2 (code, TREE_TYPE (new_var), var4, rprivb_ref); 10255 gimplify_assign (is_for_simd ? var6 : new_var, x, &input2_list); 10256 10257 gimplify_assign (build_outer_var_ref (var, ctx), rpriva_ref, 10258 &last_list); 10259 10260 x = build2 (code, TREE_TYPE (new_var), rprivalmk_ref, 10261 unshare_expr (rprival_ref)); 10262 gimplify_assign (rprival_ref, x, &reduc_list); 10263 } 10264 } 10265 10266 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node); 10267 gimple_seq_add_stmt (&scan1_list, g); 10268 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node); 10269 gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd 10270 ? scan_stmt4 : scan_stmt2), g); 10271 10272 tree controlb = create_tmp_var (boolean_type_node); 10273 tree controlp = create_tmp_var (ptr_type_node); 10274 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_); 10275 OMP_CLAUSE_DECL (nc) = controlb; 10276 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1; 10277 *cp1 = nc; 10278 cp1 = &OMP_CLAUSE_CHAIN (nc); 10279 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_); 10280 OMP_CLAUSE_DECL (nc) = controlp; 10281 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1; 10282 *cp1 = nc; 10283 cp1 = &OMP_CLAUSE_CHAIN (nc); 10284 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_); 10285 OMP_CLAUSE_DECL (nc) = controlb; 10286 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1; 10287 *cp2 = nc; 10288 cp2 = &OMP_CLAUSE_CHAIN (nc); 10289 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_); 10290 OMP_CLAUSE_DECL (nc) = controlp; 10291 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1; 10292 *cp2 = nc; 10293 cp2 = &OMP_CLAUSE_CHAIN (nc); 10294 10295 *cp1 = gimple_omp_for_clauses (stmt); 10296 gimple_omp_for_set_clauses (stmt, new_clauses1); 10297 *cp2 = gimple_omp_for_clauses (new_stmt); 10298 gimple_omp_for_set_clauses (new_stmt, new_clauses2); 10299 10300 if (is_for_simd) 10301 { 10302 gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3), scan1_list); 10303 gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4), input2_list); 10304 10305 gsi_insert_seq_after (&input3_gsi, gimple_omp_body (input_stmt3), 10306 GSI_SAME_STMT); 10307 gsi_remove (&input3_gsi, true); 10308 gsi_insert_seq_after (&scan3_gsi, gimple_omp_body (scan_stmt3), 10309 GSI_SAME_STMT); 10310 gsi_remove (&scan3_gsi, true); 10311 gsi_insert_seq_after (&input4_gsi, gimple_omp_body (input_stmt4), 10312 GSI_SAME_STMT); 10313 gsi_remove (&input4_gsi, true); 10314 gsi_insert_seq_after (&scan4_gsi, gimple_omp_body (scan_stmt4), 10315 GSI_SAME_STMT); 10316 gsi_remove (&scan4_gsi, true); 10317 } 10318 else 10319 { 10320 gimple_omp_set_body (scan_stmt1, scan1_list); 10321 gimple_omp_set_body (input_stmt2, input2_list); 10322 } 10323 10324 gsi_insert_seq_after (&input1_gsi, gimple_omp_body (input_stmt1), 10325 GSI_SAME_STMT); 10326 gsi_remove (&input1_gsi, true); 10327 gsi_insert_seq_after (&scan1_gsi, gimple_omp_body (scan_stmt1), 10328 GSI_SAME_STMT); 10329 gsi_remove (&scan1_gsi, true); 10330 gsi_insert_seq_after (&input2_gsi, gimple_omp_body (input_stmt2), 10331 GSI_SAME_STMT); 10332 gsi_remove (&input2_gsi, true); 10333 gsi_insert_seq_after (&scan2_gsi, gimple_omp_body (scan_stmt2), 10334 GSI_SAME_STMT); 10335 gsi_remove (&scan2_gsi, true); 10336 10337 gimple_seq_add_seq (body_p, 
clist); 10338 10339 tree lab1 = create_artificial_label (UNKNOWN_LOCATION); 10340 tree lab2 = create_artificial_label (UNKNOWN_LOCATION); 10341 tree lab3 = create_artificial_label (UNKNOWN_LOCATION); 10342 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2); 10343 gimple_seq_add_stmt (body_p, g); 10344 g = gimple_build_label (lab1); 10345 gimple_seq_add_stmt (body_p, g); 10346 gimple_seq_add_seq (body_p, thr01_list); 10347 g = gimple_build_goto (lab3); 10348 gimple_seq_add_stmt (body_p, g); 10349 g = gimple_build_label (lab2); 10350 gimple_seq_add_stmt (body_p, g); 10351 gimple_seq_add_seq (body_p, thrn1_list); 10352 g = gimple_build_label (lab3); 10353 gimple_seq_add_stmt (body_p, g); 10354 10355 g = gimple_build_assign (ivar, size_zero_node); 10356 gimple_seq_add_stmt (body_p, g); 10357 10358 gimple_seq_add_stmt (body_p, stmt); 10359 gimple_seq_add_seq (body_p, body); 10360 gimple_seq_add_stmt (body_p, gimple_build_omp_continue (fd->loop.v, 10361 fd->loop.v)); 10362 10363 g = gimple_build_omp_return (true); 10364 gimple_seq_add_stmt (body_p, g); 10365 gimple_seq_add_seq (body_p, mdlist); 10366 10367 lab1 = create_artificial_label (UNKNOWN_LOCATION); 10368 lab2 = create_artificial_label (UNKNOWN_LOCATION); 10369 g = gimple_build_cond (GT_EXPR, num_threads, integer_one_node, lab1, lab2); 10370 gimple_seq_add_stmt (body_p, g); 10371 g = gimple_build_label (lab1); 10372 gimple_seq_add_stmt (body_p, g); 10373 10374 g = omp_build_barrier (NULL); 10375 gimple_seq_add_stmt (body_p, g); 10376 10377 tree down = create_tmp_var (unsigned_type_node); 10378 g = gimple_build_assign (down, build_zero_cst (unsigned_type_node)); 10379 gimple_seq_add_stmt (body_p, g); 10380 10381 g = gimple_build_assign (k, build_one_cst (unsigned_type_node)); 10382 gimple_seq_add_stmt (body_p, g); 10383 10384 tree num_threadsu = create_tmp_var (unsigned_type_node); 10385 g = gimple_build_assign (num_threadsu, NOP_EXPR, num_threads); 10386 gimple_seq_add_stmt (body_p, g); 10387 10388 tree thread_numu = create_tmp_var (unsigned_type_node); 10389 g = gimple_build_assign (thread_numu, NOP_EXPR, thread_num); 10390 gimple_seq_add_stmt (body_p, g); 10391 10392 tree thread_nump1 = create_tmp_var (unsigned_type_node); 10393 g = gimple_build_assign (thread_nump1, PLUS_EXPR, thread_numu, 10394 build_int_cst (unsigned_type_node, 1)); 10395 gimple_seq_add_stmt (body_p, g); 10396 10397 lab3 = create_artificial_label (UNKNOWN_LOCATION); 10398 g = gimple_build_label (lab3); 10399 gimple_seq_add_stmt (body_p, g); 10400 10401 tree twok = create_tmp_var (unsigned_type_node); 10402 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node); 10403 gimple_seq_add_stmt (body_p, g); 10404 10405 tree lab4 = create_artificial_label (UNKNOWN_LOCATION); 10406 tree lab5 = create_artificial_label (UNKNOWN_LOCATION); 10407 tree lab6 = create_artificial_label (UNKNOWN_LOCATION); 10408 g = gimple_build_cond (GT_EXPR, twok, num_threadsu, lab4, lab5); 10409 gimple_seq_add_stmt (body_p, g); 10410 g = gimple_build_label (lab4); 10411 gimple_seq_add_stmt (body_p, g); 10412 g = gimple_build_assign (down, build_all_ones_cst (unsigned_type_node)); 10413 gimple_seq_add_stmt (body_p, g); 10414 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node); 10415 gimple_seq_add_stmt (body_p, g); 10416 10417 g = gimple_build_cond (EQ_EXPR, k, num_threadsu, lab6, lab5); 10418 gimple_seq_add_stmt (body_p, g); 10419 g = gimple_build_label (lab6); 10420 gimple_seq_add_stmt (body_p, g); 10421 10422 g = gimple_build_assign (k, RSHIFT_EXPR, k, 
integer_one_node); 10423 gimple_seq_add_stmt (body_p, g); 10424 10425 g = gimple_build_label (lab5); 10426 gimple_seq_add_stmt (body_p, g); 10427 10428 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node); 10429 gimple_seq_add_stmt (body_p, g); 10430 10431 tree cplx = create_tmp_var (build_complex_type (unsigned_type_node, false)); 10432 DECL_GIMPLE_REG_P (cplx) = 1; 10433 g = gimple_build_call_internal (IFN_MUL_OVERFLOW, 2, thread_nump1, twok); 10434 gimple_call_set_lhs (g, cplx); 10435 gimple_seq_add_stmt (body_p, g); 10436 tree mul = create_tmp_var (unsigned_type_node); 10437 g = gimple_build_assign (mul, REALPART_EXPR, 10438 build1 (REALPART_EXPR, unsigned_type_node, cplx)); 10439 gimple_seq_add_stmt (body_p, g); 10440 tree ovf = create_tmp_var (unsigned_type_node); 10441 g = gimple_build_assign (ovf, IMAGPART_EXPR, 10442 build1 (IMAGPART_EXPR, unsigned_type_node, cplx)); 10443 gimple_seq_add_stmt (body_p, g); 10444 10445 tree lab7 = create_artificial_label (UNKNOWN_LOCATION); 10446 tree lab8 = create_artificial_label (UNKNOWN_LOCATION); 10447 g = gimple_build_cond (EQ_EXPR, ovf, build_zero_cst (unsigned_type_node), 10448 lab7, lab8); 10449 gimple_seq_add_stmt (body_p, g); 10450 g = gimple_build_label (lab7); 10451 gimple_seq_add_stmt (body_p, g); 10452 10453 tree andv = create_tmp_var (unsigned_type_node); 10454 g = gimple_build_assign (andv, BIT_AND_EXPR, k, down); 10455 gimple_seq_add_stmt (body_p, g); 10456 tree andvm1 = create_tmp_var (unsigned_type_node); 10457 g = gimple_build_assign (andvm1, PLUS_EXPR, andv, 10458 build_minus_one_cst (unsigned_type_node)); 10459 gimple_seq_add_stmt (body_p, g); 10460 10461 g = gimple_build_assign (l, PLUS_EXPR, mul, andvm1); 10462 gimple_seq_add_stmt (body_p, g); 10463 10464 tree lab9 = create_artificial_label (UNKNOWN_LOCATION); 10465 g = gimple_build_cond (LT_EXPR, l, num_threadsu, lab9, lab8); 10466 gimple_seq_add_stmt (body_p, g); 10467 g = gimple_build_label (lab9); 10468 gimple_seq_add_stmt (body_p, g); 10469 gimple_seq_add_seq (body_p, reduc_list); 10470 g = gimple_build_label (lab8); 10471 gimple_seq_add_stmt (body_p, g); 10472 10473 tree lab10 = create_artificial_label (UNKNOWN_LOCATION); 10474 tree lab11 = create_artificial_label (UNKNOWN_LOCATION); 10475 tree lab12 = create_artificial_label (UNKNOWN_LOCATION); 10476 g = gimple_build_cond (EQ_EXPR, down, build_zero_cst (unsigned_type_node), 10477 lab10, lab11); 10478 gimple_seq_add_stmt (body_p, g); 10479 g = gimple_build_label (lab10); 10480 gimple_seq_add_stmt (body_p, g); 10481 g = gimple_build_assign (k, LSHIFT_EXPR, k, integer_one_node); 10482 gimple_seq_add_stmt (body_p, g); 10483 g = gimple_build_goto (lab12); 10484 gimple_seq_add_stmt (body_p, g); 10485 g = gimple_build_label (lab11); 10486 gimple_seq_add_stmt (body_p, g); 10487 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node); 10488 gimple_seq_add_stmt (body_p, g); 10489 g = gimple_build_label (lab12); 10490 gimple_seq_add_stmt (body_p, g); 10491 10492 g = omp_build_barrier (NULL); 10493 gimple_seq_add_stmt (body_p, g); 10494 10495 g = gimple_build_cond (NE_EXPR, k, build_zero_cst (unsigned_type_node), 10496 lab3, lab2); 10497 gimple_seq_add_stmt (body_p, g); 10498 10499 g = gimple_build_label (lab2); 10500 gimple_seq_add_stmt (body_p, g); 10501 10502 lab1 = create_artificial_label (UNKNOWN_LOCATION); 10503 lab2 = create_artificial_label (UNKNOWN_LOCATION); 10504 lab3 = create_artificial_label (UNKNOWN_LOCATION); 10505 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2); 10506 
gimple_seq_add_stmt (body_p, g);
10507   g = gimple_build_label (lab1);
10508   gimple_seq_add_stmt (body_p, g);
10509   gimple_seq_add_seq (body_p, thr02_list);
10510   g = gimple_build_goto (lab3);
10511   gimple_seq_add_stmt (body_p, g);
10512   g = gimple_build_label (lab2);
10513   gimple_seq_add_stmt (body_p, g);
10514   gimple_seq_add_seq (body_p, thrn2_list);
10515   g = gimple_build_label (lab3);
10516   gimple_seq_add_stmt (body_p, g);
10517 
10518   g = gimple_build_assign (ivar, size_zero_node);
10519   gimple_seq_add_stmt (body_p, g);
10520   gimple_seq_add_stmt (body_p, new_stmt);
10521   gimple_seq_add_seq (body_p, new_body);
10522 
10523   gimple_seq new_dlist = NULL;
10524   lab1 = create_artificial_label (UNKNOWN_LOCATION);
10525   lab2 = create_artificial_label (UNKNOWN_LOCATION);
10526   tree num_threadsm1 = create_tmp_var (integer_type_node);
10527   g = gimple_build_assign (num_threadsm1, PLUS_EXPR, num_threads,
10528                            integer_minus_one_node);
10529   gimple_seq_add_stmt (&new_dlist, g);
10530   g = gimple_build_cond (EQ_EXPR, thread_num, num_threadsm1, lab1, lab2);
10531   gimple_seq_add_stmt (&new_dlist, g);
10532   g = gimple_build_label (lab1);
10533   gimple_seq_add_stmt (&new_dlist, g);
10534   gimple_seq_add_seq (&new_dlist, last_list);
10535   g = gimple_build_label (lab2);
10536   gimple_seq_add_stmt (&new_dlist, g);
10537   gimple_seq_add_seq (&new_dlist, *dlist);
10538   *dlist = new_dlist;
10539 }
10540 
10541 /* Lower code for an OMP loop directive.  */
10542 
10543 static void
10544 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10545 {
10546   tree *rhs_p, block;
10547   struct omp_for_data fd, *fdp = NULL;
10548   gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
10549   gbind *new_stmt;
10550   gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
10551   gimple_seq cnt_list = NULL, clist = NULL;
10552   gimple_seq oacc_head = NULL, oacc_tail = NULL;
10553   size_t i;
10554 
10555   push_gimplify_context ();
10556 
10557   lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
10558 
10559   block = make_node (BLOCK);
10560   new_stmt = gimple_build_bind (NULL, NULL, block);
10561   /* Replace at gsi right away, so that 'stmt' is no longer a member
10562      of a sequence, as we're going to add it to a different
10563      one below.  */
10564   gsi_replace (gsi_p, new_stmt, true);
10565 
10566   /* Move the declarations of temporaries in the loop body before we make
10567      it go away.  */
10568   omp_for_body = gimple_omp_body (stmt);
10569   if (!gimple_seq_empty_p (omp_for_body)
10570       && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
10571     {
10572       gbind *inner_bind
10573         = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
10574       tree vars = gimple_bind_vars (inner_bind);
10575       gimple_bind_append_vars (new_stmt, vars);
10576       /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
10577          keep them on the inner_bind and its block.  */
10578       gimple_bind_set_vars (inner_bind, NULL_TREE);
10579       if (gimple_bind_block (inner_bind))
10580         BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
10581     }
10582 
10583   if (gimple_omp_for_combined_into_p (stmt))
10584     {
10585       omp_extract_for_data (stmt, &fd, NULL);
10586       fdp = &fd;
10587 
10588       /* We need two temporaries with fd.loop.v type (istart/iend)
10589          and then (fd.collapse - 1) temporaries with the same
10590          type for count2 ... countN-1 vars if not constant.
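         (For instance, a loop with collapse(2) whose combined iteration
         count is not a compile-time constant gets three _looptemp_
         clauses: istart, iend and one count temporary.)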
*/ 10591 size_t count = 2; 10592 tree type = fd.iter_type; 10593 if (fd.collapse > 1 10594 && TREE_CODE (fd.loop.n2) != INTEGER_CST) 10595 count += fd.collapse - 1; 10596 bool taskreg_for 10597 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR 10598 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP); 10599 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt); 10600 tree simtc = NULL; 10601 tree clauses = *pc; 10602 if (taskreg_for) 10603 outerc 10604 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt), 10605 OMP_CLAUSE__LOOPTEMP_); 10606 if (ctx->simt_stmt) 10607 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt), 10608 OMP_CLAUSE__LOOPTEMP_); 10609 for (i = 0; i < count; i++) 10610 { 10611 tree temp; 10612 if (taskreg_for) 10613 { 10614 gcc_assert (outerc); 10615 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer); 10616 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc), 10617 OMP_CLAUSE__LOOPTEMP_); 10618 } 10619 else 10620 { 10621 /* If there are 2 adjacent SIMD stmts, one with _simt_ 10622 clause, another without, make sure they have the same 10623 decls in _looptemp_ clauses, because the outer stmt 10624 they are combined into will look up just one inner_stmt. */ 10625 if (ctx->simt_stmt) 10626 temp = OMP_CLAUSE_DECL (simtc); 10627 else 10628 temp = create_tmp_var (type); 10629 insert_decl_map (&ctx->outer->cb, temp, temp); 10630 } 10631 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_); 10632 OMP_CLAUSE_DECL (*pc) = temp; 10633 pc = &OMP_CLAUSE_CHAIN (*pc); 10634 if (ctx->simt_stmt) 10635 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc), 10636 OMP_CLAUSE__LOOPTEMP_); 10637 } 10638 *pc = clauses; 10639 } 10640 10641 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */ 10642 dlist = NULL; 10643 body = NULL; 10644 tree rclauses 10645 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR, 10646 OMP_CLAUSE_REDUCTION); 10647 tree rtmp = NULL_TREE; 10648 if (rclauses) 10649 { 10650 tree type = build_pointer_type (pointer_sized_int_node); 10651 tree temp = create_tmp_var (type); 10652 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_); 10653 OMP_CLAUSE_DECL (c) = temp; 10654 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt); 10655 gimple_omp_for_set_clauses (stmt, c); 10656 lower_omp_task_reductions (ctx, OMP_FOR, 10657 gimple_omp_for_clauses (stmt), 10658 &tred_ilist, &tred_dlist); 10659 rclauses = c; 10660 rtmp = make_ssa_name (type); 10661 gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp)); 10662 } 10663 10664 lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt), 10665 ctx); 10666 10667 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx, 10668 fdp); 10669 gimple_seq_add_seq (rclauses ? &tred_ilist : &body, 10670 gimple_omp_for_pre_body (stmt)); 10671 10672 lower_omp (gimple_omp_body_ptr (stmt), ctx); 10673 10674 /* Lower the header expressions. At this point, we can assume that 10675 the header is of the form: 10676 10677 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3) 10678 10679 We just need to make sure that VAL1, VAL2 and VAL3 are lowered 10680 using the .omp_data_s mapping, if needed. 
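     (For example, a bound written as N in the source may by now be a
     load from the data-sharing record; such an expression is forced into
     a temporary here so that the GIMPLE_OMP_FOR header contains only
     invariants and registers.)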
*/ 10681 for (i = 0; i < gimple_omp_for_collapse (stmt); i++) 10682 { 10683 rhs_p = gimple_omp_for_initial_ptr (stmt, i); 10684 if (!is_gimple_min_invariant (*rhs_p)) 10685 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list); 10686 else if (TREE_CODE (*rhs_p) == ADDR_EXPR) 10687 recompute_tree_invariant_for_addr_expr (*rhs_p); 10688 10689 rhs_p = gimple_omp_for_final_ptr (stmt, i); 10690 if (!is_gimple_min_invariant (*rhs_p)) 10691 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list); 10692 else if (TREE_CODE (*rhs_p) == ADDR_EXPR) 10693 recompute_tree_invariant_for_addr_expr (*rhs_p); 10694 10695 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1); 10696 if (!is_gimple_min_invariant (*rhs_p)) 10697 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list); 10698 } 10699 if (rclauses) 10700 gimple_seq_add_seq (&tred_ilist, cnt_list); 10701 else 10702 gimple_seq_add_seq (&body, cnt_list); 10703 10704 /* Once lowered, extract the bounds and clauses. */ 10705 omp_extract_for_data (stmt, &fd, NULL); 10706 10707 if (is_gimple_omp_oacc (ctx->stmt) 10708 && !ctx_in_oacc_kernels_region (ctx)) 10709 lower_oacc_head_tail (gimple_location (stmt), 10710 gimple_omp_for_clauses (stmt), 10711 &oacc_head, &oacc_tail, ctx); 10712 10713 /* Add OpenACC partitioning and reduction markers just before the loop. */ 10714 if (oacc_head) 10715 gimple_seq_add_seq (&body, oacc_head); 10716 10717 lower_omp_for_lastprivate (&fd, &body, &dlist, &clist, ctx); 10718 10719 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR) 10720 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c)) 10721 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR 10722 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c)) 10723 { 10724 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx); 10725 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c))) 10726 OMP_CLAUSE_LINEAR_STEP (c) 10727 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c), 10728 ctx); 10729 } 10730 10731 bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP 10732 && gimple_omp_for_grid_phony (stmt)); 10733 if ((ctx->scan_inclusive || ctx->scan_exclusive) 10734 && gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR) 10735 { 10736 gcc_assert (!phony_loop); 10737 lower_omp_for_scan (&body, &dlist, stmt, &fd, ctx); 10738 } 10739 else 10740 { 10741 if (!phony_loop) 10742 gimple_seq_add_stmt (&body, stmt); 10743 gimple_seq_add_seq (&body, gimple_omp_body (stmt)); 10744 } 10745 10746 if (!phony_loop) 10747 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v, 10748 fd.loop.v)); 10749 10750 /* After the loop, add exit clauses. */ 10751 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, &clist, ctx); 10752 10753 if (clist) 10754 { 10755 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START); 10756 gcall *g = gimple_build_call (fndecl, 0); 10757 gimple_seq_add_stmt (&body, g); 10758 gimple_seq_add_seq (&body, clist); 10759 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END); 10760 g = gimple_build_call (fndecl, 0); 10761 gimple_seq_add_stmt (&body, g); 10762 } 10763 10764 if (ctx->cancellable) 10765 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label)); 10766 10767 gimple_seq_add_seq (&body, dlist); 10768 10769 if (rclauses) 10770 { 10771 gimple_seq_add_seq (&tred_ilist, body); 10772 body = tred_ilist; 10773 } 10774 10775 body = maybe_catch_exception (body); 10776 10777 if (!phony_loop) 10778 { 10779 /* Region exit marker goes at the end of the loop body. 
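      The flag passed to gimple_build_omp_return below records whether the
      loop had a nowait clause, so that later expansion knows whether an
      implicit barrier is required at the region exit.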
*/ 10780 gimple *g = gimple_build_omp_return (fd.have_nowait); 10781 gimple_seq_add_stmt (&body, g); 10782 10783 gimple_seq_add_seq (&body, tred_dlist); 10784 10785 maybe_add_implicit_barrier_cancel (ctx, g, &body); 10786 10787 if (rclauses) 10788 OMP_CLAUSE_DECL (rclauses) = rtmp; 10789 } 10790 10791 /* Add OpenACC joining and reduction markers just after the loop. */ 10792 if (oacc_tail) 10793 gimple_seq_add_seq (&body, oacc_tail); 10794 10795 pop_gimplify_context (new_stmt); 10796 10797 gimple_bind_append_vars (new_stmt, ctx->block_vars); 10798 maybe_remove_omp_member_access_dummy_vars (new_stmt); 10799 BLOCK_VARS (block) = gimple_bind_vars (new_stmt); 10800 if (BLOCK_VARS (block)) 10801 TREE_USED (block) = 1; 10802 10803 gimple_bind_set_body (new_stmt, body); 10804 gimple_omp_set_body (stmt, NULL); 10805 gimple_omp_for_set_pre_body (stmt, NULL); 10806 } 10807 10808 /* Callback for walk_stmts. Check if the current statement only contains 10809 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */ 10810 10811 static tree 10812 check_combined_parallel (gimple_stmt_iterator *gsi_p, 10813 bool *handled_ops_p, 10814 struct walk_stmt_info *wi) 10815 { 10816 int *info = (int *) wi->info; 10817 gimple *stmt = gsi_stmt (*gsi_p); 10818 10819 *handled_ops_p = true; 10820 switch (gimple_code (stmt)) 10821 { 10822 WALK_SUBSTMTS; 10823 10824 case GIMPLE_DEBUG: 10825 break; 10826 case GIMPLE_OMP_FOR: 10827 case GIMPLE_OMP_SECTIONS: 10828 *info = *info == 0 ? 1 : -1; 10829 break; 10830 default: 10831 *info = -1; 10832 break; 10833 } 10834 return NULL; 10835 } 10836 10837 struct omp_taskcopy_context 10838 { 10839 /* This field must be at the beginning, as we do "inheritance": Some 10840 callback functions for tree-inline.c (e.g., omp_copy_decl) 10841 receive a copy_body_data pointer that is up-casted to an 10842 omp_context pointer. */ 10843 copy_body_data cb; 10844 omp_context *ctx; 10845 }; 10846 10847 static tree 10848 task_copyfn_copy_decl (tree var, copy_body_data *cb) 10849 { 10850 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb; 10851 10852 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var)) 10853 return create_tmp_var (TREE_TYPE (var)); 10854 10855 return var; 10856 } 10857 10858 static tree 10859 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type) 10860 { 10861 tree name, new_fields = NULL, type, f; 10862 10863 type = lang_hooks.types.make_type (RECORD_TYPE); 10864 name = DECL_NAME (TYPE_NAME (orig_type)); 10865 name = build_decl (gimple_location (tcctx->ctx->stmt), 10866 TYPE_DECL, name, type); 10867 TYPE_NAME (type) = name; 10868 10869 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f)) 10870 { 10871 tree new_f = copy_node (f); 10872 DECL_CONTEXT (new_f) = type; 10873 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb); 10874 TREE_CHAIN (new_f) = new_fields; 10875 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL); 10876 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL); 10877 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r, 10878 &tcctx->cb, NULL); 10879 new_fields = new_f; 10880 tcctx->cb.decl_map->put (f, new_f); 10881 } 10882 TYPE_FIELDS (type) = nreverse (new_fields); 10883 layout_type (type); 10884 return type; 10885 } 10886 10887 /* Create task copyfn. 
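   The generated function has, in outline (names purely illustrative),
   the shape

     void cpyfn (struct .omp_data_t *arg, struct .omp_data_s *sarg)
     {
       arg->shared_ptr = sarg->shared_ptr;
       arg->fp_var = sarg->fp_var;   (or via the copy constructor)
       ...
     }

   copying shared variable pointers and copy constructing firstprivate
   variables from the block filled in by the encountering thread (SARG,
   of srecord_type) into the block allocated by GOMP_task (ARG, of
   record_type).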
*/ 10888 10889 static void 10890 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx) 10891 { 10892 struct function *child_cfun; 10893 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl; 10894 tree record_type, srecord_type, bind, list; 10895 bool record_needs_remap = false, srecord_needs_remap = false; 10896 splay_tree_node n; 10897 struct omp_taskcopy_context tcctx; 10898 location_t loc = gimple_location (task_stmt); 10899 size_t looptempno = 0; 10900 10901 child_fn = gimple_omp_task_copy_fn (task_stmt); 10902 task_cpyfns.safe_push (task_stmt); 10903 child_cfun = DECL_STRUCT_FUNCTION (child_fn); 10904 gcc_assert (child_cfun->cfg == NULL); 10905 DECL_SAVED_TREE (child_fn) = alloc_stmt_list (); 10906 10907 /* Reset DECL_CONTEXT on function arguments. */ 10908 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t)) 10909 DECL_CONTEXT (t) = child_fn; 10910 10911 /* Populate the function. */ 10912 push_gimplify_context (); 10913 push_cfun (child_cfun); 10914 10915 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL); 10916 TREE_SIDE_EFFECTS (bind) = 1; 10917 list = NULL; 10918 DECL_SAVED_TREE (child_fn) = bind; 10919 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt); 10920 10921 /* Remap src and dst argument types if needed. */ 10922 record_type = ctx->record_type; 10923 srecord_type = ctx->srecord_type; 10924 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f)) 10925 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn)) 10926 { 10927 record_needs_remap = true; 10928 break; 10929 } 10930 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f)) 10931 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn)) 10932 { 10933 srecord_needs_remap = true; 10934 break; 10935 } 10936 10937 if (record_needs_remap || srecord_needs_remap) 10938 { 10939 memset (&tcctx, '\0', sizeof (tcctx)); 10940 tcctx.cb.src_fn = ctx->cb.src_fn; 10941 tcctx.cb.dst_fn = child_fn; 10942 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn); 10943 gcc_checking_assert (tcctx.cb.src_node); 10944 tcctx.cb.dst_node = tcctx.cb.src_node; 10945 tcctx.cb.src_cfun = ctx->cb.src_cfun; 10946 tcctx.cb.copy_decl = task_copyfn_copy_decl; 10947 tcctx.cb.eh_lp_nr = 0; 10948 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE; 10949 tcctx.cb.decl_map = new hash_map<tree, tree>; 10950 tcctx.ctx = ctx; 10951 10952 if (record_needs_remap) 10953 record_type = task_copyfn_remap_type (&tcctx, record_type); 10954 if (srecord_needs_remap) 10955 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type); 10956 } 10957 else 10958 tcctx.cb.decl_map = NULL; 10959 10960 arg = DECL_ARGUMENTS (child_fn); 10961 TREE_TYPE (arg) = build_pointer_type (record_type); 10962 sarg = DECL_CHAIN (arg); 10963 TREE_TYPE (sarg) = build_pointer_type (srecord_type); 10964 10965 /* First pass: initialize temporaries used in record_type and srecord_type 10966 sizes and field offsets. 
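   This matters when the records were remapped above because a field is
   variably sized; e.g. for a made-up

     int n = ...;
     int vla[n];
     #pragma omp task firstprivate (vla)

   the size of the field for VLA refers to a temporary holding N, and
   that temporary must be copied in from SARG before any field of the
   remapped records can be addressed.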
*/ 10967 if (tcctx.cb.decl_map) 10968 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c)) 10969 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE) 10970 { 10971 tree *p; 10972 10973 decl = OMP_CLAUSE_DECL (c); 10974 p = tcctx.cb.decl_map->get (decl); 10975 if (p == NULL) 10976 continue; 10977 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl); 10978 sf = (tree) n->value; 10979 sf = *tcctx.cb.decl_map->get (sf); 10980 src = build_simple_mem_ref_loc (loc, sarg); 10981 src = omp_build_component_ref (src, sf); 10982 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src); 10983 append_to_statement_list (t, &list); 10984 } 10985 10986 /* Second pass: copy shared var pointers and copy construct non-VLA 10987 firstprivate vars. */ 10988 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c)) 10989 switch (OMP_CLAUSE_CODE (c)) 10990 { 10991 splay_tree_key key; 10992 case OMP_CLAUSE_SHARED: 10993 decl = OMP_CLAUSE_DECL (c); 10994 key = (splay_tree_key) decl; 10995 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)) 10996 key = (splay_tree_key) &DECL_UID (decl); 10997 n = splay_tree_lookup (ctx->field_map, key); 10998 if (n == NULL) 10999 break; 11000 f = (tree) n->value; 11001 if (tcctx.cb.decl_map) 11002 f = *tcctx.cb.decl_map->get (f); 11003 n = splay_tree_lookup (ctx->sfield_map, key); 11004 sf = (tree) n->value; 11005 if (tcctx.cb.decl_map) 11006 sf = *tcctx.cb.decl_map->get (sf); 11007 src = build_simple_mem_ref_loc (loc, sarg); 11008 src = omp_build_component_ref (src, sf); 11009 dst = build_simple_mem_ref_loc (loc, arg); 11010 dst = omp_build_component_ref (dst, f); 11011 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src); 11012 append_to_statement_list (t, &list); 11013 break; 11014 case OMP_CLAUSE_REDUCTION: 11015 case OMP_CLAUSE_IN_REDUCTION: 11016 decl = OMP_CLAUSE_DECL (c); 11017 if (TREE_CODE (decl) == MEM_REF) 11018 { 11019 decl = TREE_OPERAND (decl, 0); 11020 if (TREE_CODE (decl) == POINTER_PLUS_EXPR) 11021 decl = TREE_OPERAND (decl, 0); 11022 if (TREE_CODE (decl) == INDIRECT_REF 11023 || TREE_CODE (decl) == ADDR_EXPR) 11024 decl = TREE_OPERAND (decl, 0); 11025 } 11026 key = (splay_tree_key) decl; 11027 n = splay_tree_lookup (ctx->field_map, key); 11028 if (n == NULL) 11029 break; 11030 f = (tree) n->value; 11031 if (tcctx.cb.decl_map) 11032 f = *tcctx.cb.decl_map->get (f); 11033 n = splay_tree_lookup (ctx->sfield_map, key); 11034 sf = (tree) n->value; 11035 if (tcctx.cb.decl_map) 11036 sf = *tcctx.cb.decl_map->get (sf); 11037 src = build_simple_mem_ref_loc (loc, sarg); 11038 src = omp_build_component_ref (src, sf); 11039 if (decl != OMP_CLAUSE_DECL (c) 11040 && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE 11041 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE) 11042 src = build_simple_mem_ref_loc (loc, src); 11043 dst = build_simple_mem_ref_loc (loc, arg); 11044 dst = omp_build_component_ref (dst, f); 11045 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src); 11046 append_to_statement_list (t, &list); 11047 break; 11048 case OMP_CLAUSE__LOOPTEMP_: 11049 /* Fields for first two _looptemp_ clauses are initialized by 11050 GOMP_taskloop*, the rest are handled like firstprivate. 
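   (For taskloop those first two temporaries receive each task's start
   and end iteration bounds, which the runtime itself stores into the
   task's firstprivate block, so the copyfn must leave them alone.)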
*/ 11051 if (looptempno < 2) 11052 { 11053 looptempno++; 11054 break; 11055 } 11056 /* FALLTHRU */ 11057 case OMP_CLAUSE__REDUCTEMP_: 11058 case OMP_CLAUSE_FIRSTPRIVATE: 11059 decl = OMP_CLAUSE_DECL (c); 11060 if (is_variable_sized (decl)) 11061 break; 11062 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl); 11063 if (n == NULL) 11064 break; 11065 f = (tree) n->value; 11066 if (tcctx.cb.decl_map) 11067 f = *tcctx.cb.decl_map->get (f); 11068 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl); 11069 if (n != NULL) 11070 { 11071 sf = (tree) n->value; 11072 if (tcctx.cb.decl_map) 11073 sf = *tcctx.cb.decl_map->get (sf); 11074 src = build_simple_mem_ref_loc (loc, sarg); 11075 src = omp_build_component_ref (src, sf); 11076 if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl)) 11077 src = build_simple_mem_ref_loc (loc, src); 11078 } 11079 else 11080 src = decl; 11081 dst = build_simple_mem_ref_loc (loc, arg); 11082 dst = omp_build_component_ref (dst, f); 11083 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE) 11084 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src); 11085 else 11086 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src); 11087 append_to_statement_list (t, &list); 11088 break; 11089 case OMP_CLAUSE_PRIVATE: 11090 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c)) 11091 break; 11092 decl = OMP_CLAUSE_DECL (c); 11093 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl); 11094 f = (tree) n->value; 11095 if (tcctx.cb.decl_map) 11096 f = *tcctx.cb.decl_map->get (f); 11097 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl); 11098 if (n != NULL) 11099 { 11100 sf = (tree) n->value; 11101 if (tcctx.cb.decl_map) 11102 sf = *tcctx.cb.decl_map->get (sf); 11103 src = build_simple_mem_ref_loc (loc, sarg); 11104 src = omp_build_component_ref (src, sf); 11105 if (use_pointer_for_field (decl, NULL)) 11106 src = build_simple_mem_ref_loc (loc, src); 11107 } 11108 else 11109 src = decl; 11110 dst = build_simple_mem_ref_loc (loc, arg); 11111 dst = omp_build_component_ref (dst, f); 11112 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src); 11113 append_to_statement_list (t, &list); 11114 break; 11115 default: 11116 break; 11117 } 11118 11119 /* Last pass: handle VLA firstprivates. 
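   Continuing the made-up VLA example above, the receiver record has
   both a pointer field and the variably sized data field, so this
   emits roughly

     arg->vla_data = <copy constructed from sarg>
     arg->vla_ptr = &arg->vla_data;

   and the task body, whose DECL_VALUE_EXPR for the VLA dereferences
   the pointer field, then finds its own copy.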
*/ 11120 if (tcctx.cb.decl_map) 11121 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c)) 11122 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE) 11123 { 11124 tree ind, ptr, df; 11125 11126 decl = OMP_CLAUSE_DECL (c); 11127 if (!is_variable_sized (decl)) 11128 continue; 11129 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl); 11130 if (n == NULL) 11131 continue; 11132 f = (tree) n->value; 11133 f = *tcctx.cb.decl_map->get (f); 11134 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl)); 11135 ind = DECL_VALUE_EXPR (decl); 11136 gcc_assert (TREE_CODE (ind) == INDIRECT_REF); 11137 gcc_assert (DECL_P (TREE_OPERAND (ind, 0))); 11138 n = splay_tree_lookup (ctx->sfield_map, 11139 (splay_tree_key) TREE_OPERAND (ind, 0)); 11140 sf = (tree) n->value; 11141 sf = *tcctx.cb.decl_map->get (sf); 11142 src = build_simple_mem_ref_loc (loc, sarg); 11143 src = omp_build_component_ref (src, sf); 11144 src = build_simple_mem_ref_loc (loc, src); 11145 dst = build_simple_mem_ref_loc (loc, arg); 11146 dst = omp_build_component_ref (dst, f); 11147 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src); 11148 append_to_statement_list (t, &list); 11149 n = splay_tree_lookup (ctx->field_map, 11150 (splay_tree_key) TREE_OPERAND (ind, 0)); 11151 df = (tree) n->value; 11152 df = *tcctx.cb.decl_map->get (df); 11153 ptr = build_simple_mem_ref_loc (loc, arg); 11154 ptr = omp_build_component_ref (ptr, df); 11155 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr, 11156 build_fold_addr_expr_loc (loc, dst)); 11157 append_to_statement_list (t, &list); 11158 } 11159 11160 t = build1 (RETURN_EXPR, void_type_node, NULL); 11161 append_to_statement_list (t, &list); 11162 11163 if (tcctx.cb.decl_map) 11164 delete tcctx.cb.decl_map; 11165 pop_gimplify_context (NULL); 11166 BIND_EXPR_BODY (bind) = list; 11167 pop_cfun (); 11168 } 11169 11170 static void 11171 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq) 11172 { 11173 tree c, clauses; 11174 gimple *g; 11175 size_t cnt[4] = { 0, 0, 0, 0 }, idx = 2, i; 11176 11177 clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND); 11178 gcc_assert (clauses); 11179 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c)) 11180 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND) 11181 switch (OMP_CLAUSE_DEPEND_KIND (c)) 11182 { 11183 case OMP_CLAUSE_DEPEND_LAST: 11184 /* Lowering already done at gimplification. 
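   Nothing to do here then.  Otherwise the code below materializes the
   depend vector the runtime expects.  As an illustration (made-up
   clause operands), for

     #pragma omp task depend (out: x) depend (in: y, z)

   it builds

     void *a[5] = { (void *) 3, (void *) 1, &x, &y, &z };

   i.e. the total number of depend addresses, the number of out/inout
   ones, then the addresses grouped by kind; if mutexinoutset or depobj
   clauses are present, a five-slot header (leading zero, total, and
   three per-kind counts) is used instead.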
*/ 11185 return; 11186 case OMP_CLAUSE_DEPEND_IN: 11187 cnt[2]++; 11188 break; 11189 case OMP_CLAUSE_DEPEND_OUT: 11190 case OMP_CLAUSE_DEPEND_INOUT: 11191 cnt[0]++; 11192 break; 11193 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET: 11194 cnt[1]++; 11195 break; 11196 case OMP_CLAUSE_DEPEND_DEPOBJ: 11197 cnt[3]++; 11198 break; 11199 case OMP_CLAUSE_DEPEND_SOURCE: 11200 case OMP_CLAUSE_DEPEND_SINK: 11201 /* FALLTHRU */ 11202 default: 11203 gcc_unreachable (); 11204 } 11205 if (cnt[1] || cnt[3]) 11206 idx = 5; 11207 size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3]; 11208 tree type = build_array_type_nelts (ptr_type_node, total + idx); 11209 tree array = create_tmp_var (type); 11210 TREE_ADDRESSABLE (array) = 1; 11211 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE, 11212 NULL_TREE); 11213 if (idx == 5) 11214 { 11215 g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0)); 11216 gimple_seq_add_stmt (iseq, g); 11217 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE, 11218 NULL_TREE); 11219 } 11220 g = gimple_build_assign (r, build_int_cst (ptr_type_node, total)); 11221 gimple_seq_add_stmt (iseq, g); 11222 for (i = 0; i < (idx == 5 ? 3 : 1); i++) 11223 { 11224 r = build4 (ARRAY_REF, ptr_type_node, array, 11225 size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE); 11226 g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i])); 11227 gimple_seq_add_stmt (iseq, g); 11228 } 11229 for (i = 0; i < 4; i++) 11230 { 11231 if (cnt[i] == 0) 11232 continue; 11233 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c)) 11234 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND) 11235 continue; 11236 else 11237 { 11238 switch (OMP_CLAUSE_DEPEND_KIND (c)) 11239 { 11240 case OMP_CLAUSE_DEPEND_IN: 11241 if (i != 2) 11242 continue; 11243 break; 11244 case OMP_CLAUSE_DEPEND_OUT: 11245 case OMP_CLAUSE_DEPEND_INOUT: 11246 if (i != 0) 11247 continue; 11248 break; 11249 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET: 11250 if (i != 1) 11251 continue; 11252 break; 11253 case OMP_CLAUSE_DEPEND_DEPOBJ: 11254 if (i != 3) 11255 continue; 11256 break; 11257 default: 11258 gcc_unreachable (); 11259 } 11260 tree t = OMP_CLAUSE_DECL (c); 11261 t = fold_convert (ptr_type_node, t); 11262 gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue); 11263 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++), 11264 NULL_TREE, NULL_TREE); 11265 g = gimple_build_assign (r, t); 11266 gimple_seq_add_stmt (iseq, g); 11267 } 11268 } 11269 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND); 11270 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST; 11271 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array); 11272 OMP_CLAUSE_CHAIN (c) = *pclauses; 11273 *pclauses = c; 11274 tree clobber = build_clobber (type); 11275 g = gimple_build_assign (array, clobber); 11276 gimple_seq_add_stmt (oseq, g); 11277 } 11278 11279 /* Lower the OpenMP parallel or task directive in the current statement 11280 in GSI_P. CTX holds context information for the directive. 
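   In rough outline (parallel case only; names follow the dump
   conventions and are illustrative),

     #pragma omp parallel shared (a)
       body;

   is lowered to

     .omp_data_o.a = &a;
     GIMPLE_OMP_PARALLEL [child fn, data arg .omp_data_o]
       .omp_data_i = &.omp_data_o;
       <body, with A accessed through the .omp_data_i->a pointer>
     GIMPLE_OMP_RETURN

   with the actual outlining into the child function left to
   pass_expand_omp.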
*/ 11281 11282 static void 11283 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx) 11284 { 11285 tree clauses; 11286 tree child_fn, t; 11287 gimple *stmt = gsi_stmt (*gsi_p); 11288 gbind *par_bind, *bind, *dep_bind = NULL; 11289 gimple_seq par_body; 11290 location_t loc = gimple_location (stmt); 11291 11292 clauses = gimple_omp_taskreg_clauses (stmt); 11293 if (gimple_code (stmt) == GIMPLE_OMP_TASK 11294 && gimple_omp_task_taskwait_p (stmt)) 11295 { 11296 par_bind = NULL; 11297 par_body = NULL; 11298 } 11299 else 11300 { 11301 par_bind 11302 = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt))); 11303 par_body = gimple_bind_body (par_bind); 11304 } 11305 child_fn = ctx->cb.dst_fn; 11306 if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL 11307 && !gimple_omp_parallel_combined_p (stmt)) 11308 { 11309 struct walk_stmt_info wi; 11310 int ws_num = 0; 11311 11312 memset (&wi, 0, sizeof (wi)); 11313 wi.info = &ws_num; 11314 wi.val_only = true; 11315 walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi); 11316 if (ws_num == 1) 11317 gimple_omp_parallel_set_combined_p (stmt, true); 11318 } 11319 gimple_seq dep_ilist = NULL; 11320 gimple_seq dep_olist = NULL; 11321 if (gimple_code (stmt) == GIMPLE_OMP_TASK 11322 && omp_find_clause (clauses, OMP_CLAUSE_DEPEND)) 11323 { 11324 push_gimplify_context (); 11325 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK)); 11326 lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt), 11327 &dep_ilist, &dep_olist); 11328 } 11329 11330 if (gimple_code (stmt) == GIMPLE_OMP_TASK 11331 && gimple_omp_task_taskwait_p (stmt)) 11332 { 11333 if (dep_bind) 11334 { 11335 gsi_replace (gsi_p, dep_bind, true); 11336 gimple_bind_add_seq (dep_bind, dep_ilist); 11337 gimple_bind_add_stmt (dep_bind, stmt); 11338 gimple_bind_add_seq (dep_bind, dep_olist); 11339 pop_gimplify_context (dep_bind); 11340 } 11341 return; 11342 } 11343 11344 if (ctx->srecord_type) 11345 create_task_copyfn (as_a <gomp_task *> (stmt), ctx); 11346 11347 gimple_seq tskred_ilist = NULL; 11348 gimple_seq tskred_olist = NULL; 11349 if ((is_task_ctx (ctx) 11350 && gimple_omp_task_taskloop_p (ctx->stmt) 11351 && omp_find_clause (gimple_omp_task_clauses (ctx->stmt), 11352 OMP_CLAUSE_REDUCTION)) 11353 || (is_parallel_ctx (ctx) 11354 && omp_find_clause (gimple_omp_parallel_clauses (stmt), 11355 OMP_CLAUSE__REDUCTEMP_))) 11356 { 11357 if (dep_bind == NULL) 11358 { 11359 push_gimplify_context (); 11360 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK)); 11361 } 11362 lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? 
OMP_TASKLOOP 11363 : OMP_PARALLEL, 11364 gimple_omp_taskreg_clauses (ctx->stmt), 11365 &tskred_ilist, &tskred_olist); 11366 } 11367 11368 push_gimplify_context (); 11369 11370 gimple_seq par_olist = NULL; 11371 gimple_seq par_ilist = NULL; 11372 gimple_seq par_rlist = NULL; 11373 bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL 11374 && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt)); 11375 if (phony_construct && ctx->record_type) 11376 { 11377 gcc_checking_assert (!ctx->receiver_decl); 11378 ctx->receiver_decl = create_tmp_var 11379 (build_reference_type (ctx->record_type), ".omp_rec"); 11380 } 11381 lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL); 11382 lower_omp (&par_body, ctx); 11383 if (gimple_code (stmt) != GIMPLE_OMP_TASK) 11384 lower_reduction_clauses (clauses, &par_rlist, NULL, ctx); 11385 11386 /* Declare all the variables created by mapping and the variables 11387 declared in the scope of the parallel body. */ 11388 record_vars_into (ctx->block_vars, child_fn); 11389 maybe_remove_omp_member_access_dummy_vars (par_bind); 11390 record_vars_into (gimple_bind_vars (par_bind), child_fn); 11391 11392 if (ctx->record_type) 11393 { 11394 ctx->sender_decl 11395 = create_tmp_var (ctx->srecord_type ? ctx->srecord_type 11396 : ctx->record_type, ".omp_data_o"); 11397 DECL_NAMELESS (ctx->sender_decl) = 1; 11398 TREE_ADDRESSABLE (ctx->sender_decl) = 1; 11399 gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl); 11400 } 11401 11402 gimple_seq olist = NULL; 11403 gimple_seq ilist = NULL; 11404 lower_send_clauses (clauses, &ilist, &olist, ctx); 11405 lower_send_shared_vars (&ilist, &olist, ctx); 11406 11407 if (ctx->record_type) 11408 { 11409 tree clobber = build_clobber (TREE_TYPE (ctx->sender_decl)); 11410 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl, 11411 clobber)); 11412 } 11413 11414 /* Once all the expansions are done, sequence all the different 11415 fragments inside gimple_omp_body. */ 11416 11417 gimple_seq new_body = NULL; 11418 11419 if (ctx->record_type) 11420 { 11421 t = build_fold_addr_expr_loc (loc, ctx->sender_decl); 11422 /* fixup_child_record_type might have changed receiver_decl's type. */ 11423 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t); 11424 gimple_seq_add_stmt (&new_body, 11425 gimple_build_assign (ctx->receiver_decl, t)); 11426 } 11427 11428 gimple_seq_add_seq (&new_body, par_ilist); 11429 gimple_seq_add_seq (&new_body, par_body); 11430 gimple_seq_add_seq (&new_body, par_rlist); 11431 if (ctx->cancellable) 11432 gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label)); 11433 gimple_seq_add_seq (&new_body, par_olist); 11434 new_body = maybe_catch_exception (new_body); 11435 if (gimple_code (stmt) == GIMPLE_OMP_TASK) 11436 gimple_seq_add_stmt (&new_body, 11437 gimple_build_omp_continue (integer_zero_node, 11438 integer_zero_node)); 11439 if (!phony_construct) 11440 { 11441 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false)); 11442 gimple_omp_set_body (stmt, new_body); 11443 } 11444 11445 if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE) 11446 bind = gimple_build_bind (NULL, NULL, make_node (BLOCK)); 11447 else 11448 bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind)); 11449 gsi_replace (gsi_p, dep_bind ? 
dep_bind : bind, true); 11450 gimple_bind_add_seq (bind, ilist); 11451 if (!phony_construct) 11452 gimple_bind_add_stmt (bind, stmt); 11453 else 11454 gimple_bind_add_seq (bind, new_body); 11455 gimple_bind_add_seq (bind, olist); 11456 11457 pop_gimplify_context (NULL); 11458 11459 if (dep_bind) 11460 { 11461 gimple_bind_add_seq (dep_bind, dep_ilist); 11462 gimple_bind_add_seq (dep_bind, tskred_ilist); 11463 gimple_bind_add_stmt (dep_bind, bind); 11464 gimple_bind_add_seq (dep_bind, tskred_olist); 11465 gimple_bind_add_seq (dep_bind, dep_olist); 11466 pop_gimplify_context (dep_bind); 11467 } 11468 } 11469 11470 /* Lower the GIMPLE_OMP_TARGET in the current statement 11471 in GSI_P. CTX holds context information for the directive. */ 11472 11473 static void 11474 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx) 11475 { 11476 tree clauses; 11477 tree child_fn, t, c; 11478 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p)); 11479 gbind *tgt_bind, *bind, *dep_bind = NULL; 11480 gimple_seq tgt_body, olist, ilist, fplist, new_body; 11481 location_t loc = gimple_location (stmt); 11482 bool offloaded, data_region; 11483 unsigned int map_cnt = 0; 11484 11485 offloaded = is_gimple_omp_offloaded (stmt); 11486 switch (gimple_omp_target_kind (stmt)) 11487 { 11488 case GF_OMP_TARGET_KIND_REGION: 11489 case GF_OMP_TARGET_KIND_UPDATE: 11490 case GF_OMP_TARGET_KIND_ENTER_DATA: 11491 case GF_OMP_TARGET_KIND_EXIT_DATA: 11492 case GF_OMP_TARGET_KIND_OACC_PARALLEL: 11493 case GF_OMP_TARGET_KIND_OACC_KERNELS: 11494 case GF_OMP_TARGET_KIND_OACC_SERIAL: 11495 case GF_OMP_TARGET_KIND_OACC_UPDATE: 11496 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA: 11497 case GF_OMP_TARGET_KIND_OACC_DECLARE: 11498 data_region = false; 11499 break; 11500 case GF_OMP_TARGET_KIND_DATA: 11501 case GF_OMP_TARGET_KIND_OACC_DATA: 11502 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: 11503 data_region = true; 11504 break; 11505 default: 11506 gcc_unreachable (); 11507 } 11508 11509 clauses = gimple_omp_target_clauses (stmt); 11510 11511 gimple_seq dep_ilist = NULL; 11512 gimple_seq dep_olist = NULL; 11513 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND)) 11514 { 11515 push_gimplify_context (); 11516 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK)); 11517 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt), 11518 &dep_ilist, &dep_olist); 11519 } 11520 11521 tgt_bind = NULL; 11522 tgt_body = NULL; 11523 if (offloaded) 11524 { 11525 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt)); 11526 tgt_body = gimple_bind_body (tgt_bind); 11527 } 11528 else if (data_region) 11529 tgt_body = gimple_omp_body (stmt); 11530 child_fn = ctx->cb.dst_fn; 11531 11532 push_gimplify_context (); 11533 fplist = NULL; 11534 11535 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c)) 11536 switch (OMP_CLAUSE_CODE (c)) 11537 { 11538 tree var, x; 11539 11540 default: 11541 break; 11542 case OMP_CLAUSE_MAP: 11543 #if CHECKING_P 11544 /* First check what we're prepared to handle in the following. 
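   The first group of map kinds below can appear on both OpenMP and
   OpenACC constructs; the second group must only appear on OpenACC
   ones, which the assertion in that branch enforces.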
*/ 11545 switch (OMP_CLAUSE_MAP_KIND (c)) 11546 { 11547 case GOMP_MAP_ALLOC: 11548 case GOMP_MAP_TO: 11549 case GOMP_MAP_FROM: 11550 case GOMP_MAP_TOFROM: 11551 case GOMP_MAP_POINTER: 11552 case GOMP_MAP_TO_PSET: 11553 case GOMP_MAP_DELETE: 11554 case GOMP_MAP_RELEASE: 11555 case GOMP_MAP_ALWAYS_TO: 11556 case GOMP_MAP_ALWAYS_FROM: 11557 case GOMP_MAP_ALWAYS_TOFROM: 11558 case GOMP_MAP_FIRSTPRIVATE_POINTER: 11559 case GOMP_MAP_FIRSTPRIVATE_REFERENCE: 11560 case GOMP_MAP_STRUCT: 11561 case GOMP_MAP_ALWAYS_POINTER: 11562 break; 11563 case GOMP_MAP_IF_PRESENT: 11564 case GOMP_MAP_FORCE_ALLOC: 11565 case GOMP_MAP_FORCE_TO: 11566 case GOMP_MAP_FORCE_FROM: 11567 case GOMP_MAP_FORCE_TOFROM: 11568 case GOMP_MAP_FORCE_PRESENT: 11569 case GOMP_MAP_FORCE_DEVICEPTR: 11570 case GOMP_MAP_DEVICE_RESIDENT: 11571 case GOMP_MAP_LINK: 11572 case GOMP_MAP_ATTACH: 11573 case GOMP_MAP_DETACH: 11574 case GOMP_MAP_FORCE_DETACH: 11575 gcc_assert (is_gimple_omp_oacc (stmt)); 11576 break; 11577 default: 11578 gcc_unreachable (); 11579 } 11580 #endif 11581 /* FALLTHRU */ 11582 case OMP_CLAUSE_TO: 11583 case OMP_CLAUSE_FROM: 11584 oacc_firstprivate: 11585 var = OMP_CLAUSE_DECL (c); 11586 if (!DECL_P (var)) 11587 { 11588 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP 11589 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) 11590 && (OMP_CLAUSE_MAP_KIND (c) 11591 != GOMP_MAP_FIRSTPRIVATE_POINTER))) 11592 map_cnt++; 11593 continue; 11594 } 11595 11596 if (DECL_SIZE (var) 11597 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST) 11598 { 11599 tree var2 = DECL_VALUE_EXPR (var); 11600 gcc_assert (TREE_CODE (var2) == INDIRECT_REF); 11601 var2 = TREE_OPERAND (var2, 0); 11602 gcc_assert (DECL_P (var2)); 11603 var = var2; 11604 } 11605 11606 if (offloaded 11607 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP 11608 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER 11609 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)) 11610 { 11611 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE) 11612 { 11613 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)) 11614 && varpool_node::get_create (var)->offloadable) 11615 continue; 11616 11617 tree type = build_pointer_type (TREE_TYPE (var)); 11618 tree new_var = lookup_decl (var, ctx); 11619 x = create_tmp_var_raw (type, get_name (new_var)); 11620 gimple_add_tmp_var (x); 11621 x = build_simple_mem_ref (x); 11622 SET_DECL_VALUE_EXPR (new_var, x); 11623 DECL_HAS_VALUE_EXPR_P (new_var) = 1; 11624 } 11625 continue; 11626 } 11627 11628 if (!maybe_lookup_field (var, ctx)) 11629 continue; 11630 11631 /* Don't remap compute constructs' reduction variables, because the 11632 intermediate result must be local to each gang. */ 11633 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP 11634 && OMP_CLAUSE_MAP_IN_REDUCTION (c))) 11635 { 11636 x = build_receiver_ref (var, true, ctx); 11637 tree new_var = lookup_decl (var, ctx); 11638 11639 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP 11640 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER 11641 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) 11642 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE) 11643 x = build_simple_mem_ref (x); 11644 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE) 11645 { 11646 gcc_assert (is_gimple_omp_oacc (ctx->stmt)); 11647 if (omp_is_reference (new_var) 11648 && (TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE 11649 || DECL_BY_REFERENCE (var))) 11650 { 11651 /* Create a local object to hold the instance 11652 value. 
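   That is, roughly (INST being a fresh temporary):

     type inst = *x;
     x = &inst;

   so the offloaded region works on a private instance instead of
   going through a reference to the original object.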
*/ 11653 tree type = TREE_TYPE (TREE_TYPE (new_var)); 11654 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var)); 11655 tree inst = create_tmp_var (type, id); 11656 gimplify_assign (inst, fold_indirect_ref (x), &fplist); 11657 x = build_fold_addr_expr (inst); 11658 } 11659 gimplify_assign (new_var, x, &fplist); 11660 } 11661 else if (DECL_P (new_var)) 11662 { 11663 SET_DECL_VALUE_EXPR (new_var, x); 11664 DECL_HAS_VALUE_EXPR_P (new_var) = 1; 11665 } 11666 else 11667 gcc_unreachable (); 11668 } 11669 map_cnt++; 11670 break; 11671 11672 case OMP_CLAUSE_FIRSTPRIVATE: 11673 gcc_checking_assert (offloaded); 11674 if (is_gimple_omp_oacc (ctx->stmt)) 11675 { 11676 /* No 'firstprivate' clauses on OpenACC 'kernels'. */ 11677 gcc_checking_assert (!is_oacc_kernels (ctx)); 11678 11679 goto oacc_firstprivate; 11680 } 11681 map_cnt++; 11682 var = OMP_CLAUSE_DECL (c); 11683 if (!omp_is_reference (var) 11684 && !is_gimple_reg_type (TREE_TYPE (var))) 11685 { 11686 tree new_var = lookup_decl (var, ctx); 11687 if (is_variable_sized (var)) 11688 { 11689 tree pvar = DECL_VALUE_EXPR (var); 11690 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF); 11691 pvar = TREE_OPERAND (pvar, 0); 11692 gcc_assert (DECL_P (pvar)); 11693 tree new_pvar = lookup_decl (pvar, ctx); 11694 x = build_fold_indirect_ref (new_pvar); 11695 TREE_THIS_NOTRAP (x) = 1; 11696 } 11697 else 11698 x = build_receiver_ref (var, true, ctx); 11699 SET_DECL_VALUE_EXPR (new_var, x); 11700 DECL_HAS_VALUE_EXPR_P (new_var) = 1; 11701 } 11702 break; 11703 11704 case OMP_CLAUSE_PRIVATE: 11705 gcc_checking_assert (offloaded); 11706 if (is_gimple_omp_oacc (ctx->stmt)) 11707 { 11708 /* No 'private' clauses on OpenACC 'kernels'. */ 11709 gcc_checking_assert (!is_oacc_kernels (ctx)); 11710 11711 break; 11712 } 11713 var = OMP_CLAUSE_DECL (c); 11714 if (is_variable_sized (var)) 11715 { 11716 tree new_var = lookup_decl (var, ctx); 11717 tree pvar = DECL_VALUE_EXPR (var); 11718 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF); 11719 pvar = TREE_OPERAND (pvar, 0); 11720 gcc_assert (DECL_P (pvar)); 11721 tree new_pvar = lookup_decl (pvar, ctx); 11722 x = build_fold_indirect_ref (new_pvar); 11723 TREE_THIS_NOTRAP (x) = 1; 11724 SET_DECL_VALUE_EXPR (new_var, x); 11725 DECL_HAS_VALUE_EXPR_P (new_var) = 1; 11726 } 11727 break; 11728 11729 case OMP_CLAUSE_USE_DEVICE_PTR: 11730 case OMP_CLAUSE_USE_DEVICE_ADDR: 11731 case OMP_CLAUSE_IS_DEVICE_PTR: 11732 var = OMP_CLAUSE_DECL (c); 11733 map_cnt++; 11734 if (is_variable_sized (var)) 11735 { 11736 tree new_var = lookup_decl (var, ctx); 11737 tree pvar = DECL_VALUE_EXPR (var); 11738 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF); 11739 pvar = TREE_OPERAND (pvar, 0); 11740 gcc_assert (DECL_P (pvar)); 11741 tree new_pvar = lookup_decl (pvar, ctx); 11742 x = build_fold_indirect_ref (new_pvar); 11743 TREE_THIS_NOTRAP (x) = 1; 11744 SET_DECL_VALUE_EXPR (new_var, x); 11745 DECL_HAS_VALUE_EXPR_P (new_var) = 1; 11746 } 11747 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR 11748 && !omp_is_reference (var) 11749 && !omp_is_allocatable_or_ptr (var) 11750 && !lang_hooks.decls.omp_array_data (var, true)) 11751 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE) 11752 { 11753 tree new_var = lookup_decl (var, ctx); 11754 tree type = build_pointer_type (TREE_TYPE (var)); 11755 x = create_tmp_var_raw (type, get_name (new_var)); 11756 gimple_add_tmp_var (x); 11757 x = build_simple_mem_ref (x); 11758 SET_DECL_VALUE_EXPR (new_var, x); 11759 DECL_HAS_VALUE_EXPR_P (new_var) = 1; 11760 } 11761 else 11762 { 11763 tree new_var = lookup_decl (var, 
ctx); 11764 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var)); 11765 gimple_add_tmp_var (x); 11766 SET_DECL_VALUE_EXPR (new_var, x); 11767 DECL_HAS_VALUE_EXPR_P (new_var) = 1; 11768 } 11769 break; 11770 } 11771 11772 if (offloaded) 11773 { 11774 target_nesting_level++; 11775 lower_omp (&tgt_body, ctx); 11776 target_nesting_level--; 11777 } 11778 else if (data_region) 11779 lower_omp (&tgt_body, ctx); 11780 11781 if (offloaded) 11782 { 11783 /* Declare all the variables created by mapping and the variables 11784 declared in the scope of the target body. */ 11785 record_vars_into (ctx->block_vars, child_fn); 11786 maybe_remove_omp_member_access_dummy_vars (tgt_bind); 11787 record_vars_into (gimple_bind_vars (tgt_bind), child_fn); 11788 } 11789 11790 olist = NULL; 11791 ilist = NULL; 11792 if (ctx->record_type) 11793 { 11794 ctx->sender_decl 11795 = create_tmp_var (ctx->record_type, ".omp_data_arr"); 11796 DECL_NAMELESS (ctx->sender_decl) = 1; 11797 TREE_ADDRESSABLE (ctx->sender_decl) = 1; 11798 t = make_tree_vec (3); 11799 TREE_VEC_ELT (t, 0) = ctx->sender_decl; 11800 TREE_VEC_ELT (t, 1) 11801 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt), 11802 ".omp_data_sizes"); 11803 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1; 11804 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1; 11805 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1; 11806 tree tkind_type = short_unsigned_type_node; 11807 int talign_shift = 8; 11808 TREE_VEC_ELT (t, 2) 11809 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt), 11810 ".omp_data_kinds"); 11811 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1; 11812 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1; 11813 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1; 11814 gimple_omp_target_set_data_arg (stmt, t); 11815 11816 vec<constructor_elt, va_gc> *vsize; 11817 vec<constructor_elt, va_gc> *vkind; 11818 vec_alloc (vsize, map_cnt); 11819 vec_alloc (vkind, map_cnt); 11820 unsigned int map_idx = 0; 11821 11822 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c)) 11823 switch (OMP_CLAUSE_CODE (c)) 11824 { 11825 tree ovar, nc, s, purpose, var, x, type; 11826 unsigned int talign; 11827 11828 default: 11829 break; 11830 11831 case OMP_CLAUSE_MAP: 11832 case OMP_CLAUSE_TO: 11833 case OMP_CLAUSE_FROM: 11834 oacc_firstprivate_map: 11835 nc = c; 11836 ovar = OMP_CLAUSE_DECL (c); 11837 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP 11838 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER 11839 || (OMP_CLAUSE_MAP_KIND (c) 11840 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))) 11841 break; 11842 if (!DECL_P (ovar)) 11843 { 11844 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP 11845 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)) 11846 { 11847 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c)) 11848 == get_base_address (ovar)); 11849 nc = OMP_CLAUSE_CHAIN (c); 11850 ovar = OMP_CLAUSE_DECL (nc); 11851 } 11852 else 11853 { 11854 tree x = build_sender_ref (ovar, ctx); 11855 tree v 11856 = build_fold_addr_expr_with_type (ovar, ptr_type_node); 11857 gimplify_assign (x, v, &ilist); 11858 nc = NULL_TREE; 11859 } 11860 } 11861 else 11862 { 11863 if (DECL_SIZE (ovar) 11864 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST) 11865 { 11866 tree ovar2 = DECL_VALUE_EXPR (ovar); 11867 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF); 11868 ovar2 = TREE_OPERAND (ovar2, 0); 11869 gcc_assert (DECL_P (ovar2)); 11870 ovar = ovar2; 11871 } 11872 if (!maybe_lookup_field (ovar, ctx)) 11873 continue; 11874 } 11875 11876 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar)); 11877 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign) 11878 
talign = DECL_ALIGN_UNIT (ovar); 11879 if (nc) 11880 { 11881 var = lookup_decl_in_outer_ctx (ovar, ctx); 11882 x = build_sender_ref (ovar, ctx); 11883 11884 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP 11885 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER 11886 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) 11887 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE) 11888 { 11889 gcc_assert (offloaded); 11890 tree avar 11891 = create_tmp_var (TREE_TYPE (TREE_TYPE (x))); 11892 mark_addressable (avar); 11893 gimplify_assign (avar, build_fold_addr_expr (var), &ilist); 11894 talign = DECL_ALIGN_UNIT (avar); 11895 avar = build_fold_addr_expr (avar); 11896 gimplify_assign (x, avar, &ilist); 11897 } 11898 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE) 11899 { 11900 gcc_assert (is_gimple_omp_oacc (ctx->stmt)); 11901 if (!omp_is_reference (var)) 11902 { 11903 if (is_gimple_reg (var) 11904 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)) 11905 TREE_NO_WARNING (var) = 1; 11906 var = build_fold_addr_expr (var); 11907 } 11908 else 11909 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar))); 11910 gimplify_assign (x, var, &ilist); 11911 } 11912 else if (is_gimple_reg (var)) 11913 { 11914 gcc_assert (offloaded); 11915 tree avar = create_tmp_var (TREE_TYPE (var)); 11916 mark_addressable (avar); 11917 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c); 11918 if (GOMP_MAP_COPY_TO_P (map_kind) 11919 || map_kind == GOMP_MAP_POINTER 11920 || map_kind == GOMP_MAP_TO_PSET 11921 || map_kind == GOMP_MAP_FORCE_DEVICEPTR) 11922 { 11923 /* If we need to initialize a temporary 11924 with VAR because it is not addressable, and 11925 the variable hasn't been initialized yet, then 11926 we'll get a warning for the store to avar. 11927 Don't warn in that case, the mapping might 11928 be implicit. */ 11929 TREE_NO_WARNING (var) = 1; 11930 gimplify_assign (avar, var, &ilist); 11931 } 11932 avar = build_fold_addr_expr (avar); 11933 gimplify_assign (x, avar, &ilist); 11934 if ((GOMP_MAP_COPY_FROM_P (map_kind) 11935 || map_kind == GOMP_MAP_FORCE_DEVICEPTR) 11936 && !TYPE_READONLY (TREE_TYPE (var))) 11937 { 11938 x = unshare_expr (x); 11939 x = build_simple_mem_ref (x); 11940 gimplify_assign (var, x, &olist); 11941 } 11942 } 11943 else 11944 { 11945 /* While MAP is handled explicitly by the FE, 11946 for 'target update', only the identified variable is passed. 
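   Depending on whether that variable is an allocatable or pointer,
   and possibly an optional argument, it therefore has to be
   dereferenced or have its address taken first, as done below.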
*/ 11947 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FROM 11948 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO) 11949 && (omp_is_allocatable_or_ptr (var) 11950 && omp_check_optional_argument (var, false))) 11951 var = build_fold_indirect_ref (var); 11952 else if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FROM 11953 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_TO) 11954 || (!omp_is_allocatable_or_ptr (var) 11955 && !omp_check_optional_argument (var, false))) 11956 var = build_fold_addr_expr (var); 11957 gimplify_assign (x, var, &ilist); 11958 } 11959 } 11960 s = NULL_TREE; 11961 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE) 11962 { 11963 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt)); 11964 s = TREE_TYPE (ovar); 11965 if (TREE_CODE (s) == REFERENCE_TYPE 11966 || omp_check_optional_argument (ovar, false)) 11967 s = TREE_TYPE (s); 11968 s = TYPE_SIZE_UNIT (s); 11969 } 11970 else 11971 s = OMP_CLAUSE_SIZE (c); 11972 if (s == NULL_TREE) 11973 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar)); 11974 s = fold_convert (size_type_node, s); 11975 purpose = size_int (map_idx++); 11976 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s); 11977 if (TREE_CODE (s) != INTEGER_CST) 11978 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0; 11979 11980 unsigned HOST_WIDE_INT tkind, tkind_zero; 11981 switch (OMP_CLAUSE_CODE (c)) 11982 { 11983 case OMP_CLAUSE_MAP: 11984 tkind = OMP_CLAUSE_MAP_KIND (c); 11985 tkind_zero = tkind; 11986 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c)) 11987 switch (tkind) 11988 { 11989 case GOMP_MAP_ALLOC: 11990 case GOMP_MAP_IF_PRESENT: 11991 case GOMP_MAP_TO: 11992 case GOMP_MAP_FROM: 11993 case GOMP_MAP_TOFROM: 11994 case GOMP_MAP_ALWAYS_TO: 11995 case GOMP_MAP_ALWAYS_FROM: 11996 case GOMP_MAP_ALWAYS_TOFROM: 11997 case GOMP_MAP_RELEASE: 11998 case GOMP_MAP_FORCE_TO: 11999 case GOMP_MAP_FORCE_FROM: 12000 case GOMP_MAP_FORCE_TOFROM: 12001 case GOMP_MAP_FORCE_PRESENT: 12002 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION; 12003 break; 12004 case GOMP_MAP_DELETE: 12005 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION; 12006 default: 12007 break; 12008 } 12009 if (tkind_zero != tkind) 12010 { 12011 if (integer_zerop (s)) 12012 tkind = tkind_zero; 12013 else if (integer_nonzerop (s)) 12014 tkind_zero = tkind; 12015 } 12016 break; 12017 case OMP_CLAUSE_FIRSTPRIVATE: 12018 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt)); 12019 tkind = GOMP_MAP_TO; 12020 tkind_zero = tkind; 12021 break; 12022 case OMP_CLAUSE_TO: 12023 tkind = GOMP_MAP_TO; 12024 tkind_zero = tkind; 12025 break; 12026 case OMP_CLAUSE_FROM: 12027 tkind = GOMP_MAP_FROM; 12028 tkind_zero = tkind; 12029 break; 12030 default: 12031 gcc_unreachable (); 12032 } 12033 gcc_checking_assert (tkind 12034 < (HOST_WIDE_INT_C (1U) << talign_shift)); 12035 gcc_checking_assert (tkind_zero 12036 < (HOST_WIDE_INT_C (1U) << talign_shift)); 12037 talign = ceil_log2 (talign); 12038 tkind |= talign << talign_shift; 12039 tkind_zero |= talign << talign_shift; 12040 gcc_checking_assert (tkind 12041 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type))); 12042 gcc_checking_assert (tkind_zero 12043 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type))); 12044 if (tkind == tkind_zero) 12045 x = build_int_cstu (tkind_type, tkind); 12046 else 12047 { 12048 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0; 12049 x = build3 (COND_EXPR, tkind_type, 12050 fold_build2 (EQ_EXPR, boolean_type_node, 12051 unshare_expr (s), size_zero_node), 12052 build_int_cstu (tkind_type, tkind_zero), 12053 build_int_cstu (tkind_type, tkind)); 12054 } 12055 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x); 12056 if (nc && nc != c) 12057 c = 
nc; 12058 break; 12059 12060 case OMP_CLAUSE_FIRSTPRIVATE: 12061 if (is_gimple_omp_oacc (ctx->stmt)) 12062 goto oacc_firstprivate_map; 12063 ovar = OMP_CLAUSE_DECL (c); 12064 if (omp_is_reference (ovar)) 12065 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar))); 12066 else 12067 talign = DECL_ALIGN_UNIT (ovar); 12068 var = lookup_decl_in_outer_ctx (ovar, ctx); 12069 x = build_sender_ref (ovar, ctx); 12070 tkind = GOMP_MAP_FIRSTPRIVATE; 12071 type = TREE_TYPE (ovar); 12072 if (omp_is_reference (ovar)) 12073 type = TREE_TYPE (type); 12074 if ((INTEGRAL_TYPE_P (type) 12075 && TYPE_PRECISION (type) <= POINTER_SIZE) 12076 || TREE_CODE (type) == POINTER_TYPE) 12077 { 12078 tkind = GOMP_MAP_FIRSTPRIVATE_INT; 12079 tree t = var; 12080 if (omp_is_reference (var)) 12081 t = build_simple_mem_ref (var); 12082 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)) 12083 TREE_NO_WARNING (var) = 1; 12084 if (TREE_CODE (type) != POINTER_TYPE) 12085 t = fold_convert (pointer_sized_int_node, t); 12086 t = fold_convert (TREE_TYPE (x), t); 12087 gimplify_assign (x, t, &ilist); 12088 } 12089 else if (omp_is_reference (var)) 12090 gimplify_assign (x, var, &ilist); 12091 else if (is_gimple_reg (var)) 12092 { 12093 tree avar = create_tmp_var (TREE_TYPE (var)); 12094 mark_addressable (avar); 12095 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)) 12096 TREE_NO_WARNING (var) = 1; 12097 gimplify_assign (avar, var, &ilist); 12098 avar = build_fold_addr_expr (avar); 12099 gimplify_assign (x, avar, &ilist); 12100 } 12101 else 12102 { 12103 var = build_fold_addr_expr (var); 12104 gimplify_assign (x, var, &ilist); 12105 } 12106 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT) 12107 s = size_int (0); 12108 else if (omp_is_reference (ovar)) 12109 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar))); 12110 else 12111 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar)); 12112 s = fold_convert (size_type_node, s); 12113 purpose = size_int (map_idx++); 12114 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s); 12115 if (TREE_CODE (s) != INTEGER_CST) 12116 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0; 12117 12118 gcc_checking_assert (tkind 12119 < (HOST_WIDE_INT_C (1U) << talign_shift)); 12120 talign = ceil_log2 (talign); 12121 tkind |= talign << talign_shift; 12122 gcc_checking_assert (tkind 12123 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type))); 12124 CONSTRUCTOR_APPEND_ELT (vkind, purpose, 12125 build_int_cstu (tkind_type, tkind)); 12126 break; 12127 12128 case OMP_CLAUSE_USE_DEVICE_PTR: 12129 case OMP_CLAUSE_USE_DEVICE_ADDR: 12130 case OMP_CLAUSE_IS_DEVICE_PTR: 12131 ovar = OMP_CLAUSE_DECL (c); 12132 var = lookup_decl_in_outer_ctx (ovar, ctx); 12133 12134 if (lang_hooks.decls.omp_array_data (ovar, true)) 12135 { 12136 tkind = (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR 12137 ? 
GOMP_MAP_USE_DEVICE_PTR : GOMP_MAP_FIRSTPRIVATE_INT); 12138 x = build_sender_ref ((splay_tree_key) &DECL_NAME (ovar), ctx); 12139 } 12140 else if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR) 12141 { 12142 tkind = GOMP_MAP_USE_DEVICE_PTR; 12143 x = build_sender_ref ((splay_tree_key) &DECL_UID (ovar), ctx); 12144 } 12145 else 12146 { 12147 tkind = GOMP_MAP_FIRSTPRIVATE_INT; 12148 x = build_sender_ref (ovar, ctx); 12149 } 12150 12151 if (is_gimple_omp_oacc (ctx->stmt)) 12152 { 12153 gcc_assert (tkind == GOMP_MAP_USE_DEVICE_PTR); 12154 12155 if (OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c)) 12156 tkind = GOMP_MAP_USE_DEVICE_PTR_IF_PRESENT; 12157 } 12158 12159 type = TREE_TYPE (ovar); 12160 if (lang_hooks.decls.omp_array_data (ovar, true)) 12161 var = lang_hooks.decls.omp_array_data (ovar, false); 12162 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR 12163 && !omp_is_reference (ovar) 12164 && !omp_is_allocatable_or_ptr (ovar)) 12165 || TREE_CODE (type) == ARRAY_TYPE) 12166 var = build_fold_addr_expr (var); 12167 else 12168 { 12169 if (omp_is_reference (ovar) 12170 || omp_check_optional_argument (ovar, false) 12171 || omp_is_allocatable_or_ptr (ovar)) 12172 { 12173 type = TREE_TYPE (type); 12174 if (TREE_CODE (type) != ARRAY_TYPE 12175 && ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR 12176 && !omp_is_allocatable_or_ptr (ovar)) 12177 || (omp_is_reference (ovar) 12178 && omp_is_allocatable_or_ptr (ovar)))) 12179 var = build_simple_mem_ref (var); 12180 var = fold_convert (TREE_TYPE (x), var); 12181 } 12182 } 12183 tree present; 12184 present = omp_check_optional_argument (ovar, true); 12185 if (present) 12186 { 12187 tree null_label = create_artificial_label (UNKNOWN_LOCATION); 12188 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION); 12189 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION); 12190 tree new_x = unshare_expr (x); 12191 gimplify_expr (&present, &ilist, NULL, is_gimple_val, 12192 fb_rvalue); 12193 gcond *cond = gimple_build_cond_from_tree (present, 12194 notnull_label, 12195 null_label); 12196 gimple_seq_add_stmt (&ilist, cond); 12197 gimple_seq_add_stmt (&ilist, gimple_build_label (null_label)); 12198 gimplify_assign (new_x, null_pointer_node, &ilist); 12199 gimple_seq_add_stmt (&ilist, gimple_build_goto (opt_arg_label)); 12200 gimple_seq_add_stmt (&ilist, 12201 gimple_build_label (notnull_label)); 12202 gimplify_assign (x, var, &ilist); 12203 gimple_seq_add_stmt (&ilist, 12204 gimple_build_label (opt_arg_label)); 12205 } 12206 else 12207 gimplify_assign (x, var, &ilist); 12208 s = size_int (0); 12209 purpose = size_int (map_idx++); 12210 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s); 12211 gcc_checking_assert (tkind 12212 < (HOST_WIDE_INT_C (1U) << talign_shift)); 12213 gcc_checking_assert (tkind 12214 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type))); 12215 CONSTRUCTOR_APPEND_ELT (vkind, purpose, 12216 build_int_cstu (tkind_type, tkind)); 12217 break; 12218 } 12219 12220 gcc_assert (map_idx == map_cnt); 12221 12222 DECL_INITIAL (TREE_VEC_ELT (t, 1)) 12223 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize); 12224 DECL_INITIAL (TREE_VEC_ELT (t, 2)) 12225 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind); 12226 for (int i = 1; i <= 2; i++) 12227 if (!TREE_STATIC (TREE_VEC_ELT (t, i))) 12228 { 12229 gimple_seq initlist = NULL; 12230 force_gimple_operand (build1 (DECL_EXPR, void_type_node, 12231 TREE_VEC_ELT (t, i)), 12232 &initlist, true, NULL_TREE); 12233 gimple_seq_add_seq (&ilist, initlist); 12234 12235 tree clobber = 
build_clobber (TREE_TYPE (TREE_VEC_ELT (t, i))); 12236 gimple_seq_add_stmt (&olist, 12237 gimple_build_assign (TREE_VEC_ELT (t, i), 12238 clobber)); 12239 } 12240 12241 tree clobber = build_clobber (ctx->record_type); 12242 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl, 12243 clobber)); 12244 } 12245 12246 /* Once all the expansions are done, sequence all the different 12247 fragments inside gimple_omp_body. */ 12248 12249 new_body = NULL; 12250 12251 if (offloaded 12252 && ctx->record_type) 12253 { 12254 t = build_fold_addr_expr_loc (loc, ctx->sender_decl); 12255 /* fixup_child_record_type might have changed receiver_decl's type. */ 12256 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t); 12257 gimple_seq_add_stmt (&new_body, 12258 gimple_build_assign (ctx->receiver_decl, t)); 12259 } 12260 gimple_seq_add_seq (&new_body, fplist); 12261 12262 if (offloaded || data_region) 12263 { 12264 tree prev = NULL_TREE; 12265 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c)) 12266 switch (OMP_CLAUSE_CODE (c)) 12267 { 12268 tree var, x; 12269 default: 12270 break; 12271 case OMP_CLAUSE_FIRSTPRIVATE: 12272 if (is_gimple_omp_oacc (ctx->stmt)) 12273 break; 12274 var = OMP_CLAUSE_DECL (c); 12275 if (omp_is_reference (var) 12276 || is_gimple_reg_type (TREE_TYPE (var))) 12277 { 12278 tree new_var = lookup_decl (var, ctx); 12279 tree type; 12280 type = TREE_TYPE (var); 12281 if (omp_is_reference (var)) 12282 type = TREE_TYPE (type); 12283 if ((INTEGRAL_TYPE_P (type) 12284 && TYPE_PRECISION (type) <= POINTER_SIZE) 12285 || TREE_CODE (type) == POINTER_TYPE) 12286 { 12287 x = build_receiver_ref (var, false, ctx); 12288 if (TREE_CODE (type) != POINTER_TYPE) 12289 x = fold_convert (pointer_sized_int_node, x); 12290 x = fold_convert (type, x); 12291 gimplify_expr (&x, &new_body, NULL, is_gimple_val, 12292 fb_rvalue); 12293 if (omp_is_reference (var)) 12294 { 12295 tree v = create_tmp_var_raw (type, get_name (var)); 12296 gimple_add_tmp_var (v); 12297 TREE_ADDRESSABLE (v) = 1; 12298 gimple_seq_add_stmt (&new_body, 12299 gimple_build_assign (v, x)); 12300 x = build_fold_addr_expr (v); 12301 } 12302 gimple_seq_add_stmt (&new_body, 12303 gimple_build_assign (new_var, x)); 12304 } 12305 else 12306 { 12307 x = build_receiver_ref (var, !omp_is_reference (var), ctx); 12308 gimplify_expr (&x, &new_body, NULL, is_gimple_val, 12309 fb_rvalue); 12310 gimple_seq_add_stmt (&new_body, 12311 gimple_build_assign (new_var, x)); 12312 } 12313 } 12314 else if (is_variable_sized (var)) 12315 { 12316 tree pvar = DECL_VALUE_EXPR (var); 12317 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF); 12318 pvar = TREE_OPERAND (pvar, 0); 12319 gcc_assert (DECL_P (pvar)); 12320 tree new_var = lookup_decl (pvar, ctx); 12321 x = build_receiver_ref (var, false, ctx); 12322 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue); 12323 gimple_seq_add_stmt (&new_body, 12324 gimple_build_assign (new_var, x)); 12325 } 12326 break; 12327 case OMP_CLAUSE_PRIVATE: 12328 if (is_gimple_omp_oacc (ctx->stmt)) 12329 break; 12330 var = OMP_CLAUSE_DECL (c); 12331 if (omp_is_reference (var)) 12332 { 12333 location_t clause_loc = OMP_CLAUSE_LOCATION (c); 12334 tree new_var = lookup_decl (var, ctx); 12335 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var))); 12336 if (TREE_CONSTANT (x)) 12337 { 12338 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)), 12339 get_name (var)); 12340 gimple_add_tmp_var (x); 12341 TREE_ADDRESSABLE (x) = 1; 12342 x = build_fold_addr_expr_loc (clause_loc, x); 12343 } 12344 else 12345 break; 12346 12347 x 
= fold_convert_loc (clause_loc, TREE_TYPE (new_var), x); 12348 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue); 12349 gimple_seq_add_stmt (&new_body, 12350 gimple_build_assign (new_var, x)); 12351 } 12352 break; 12353 case OMP_CLAUSE_USE_DEVICE_PTR: 12354 case OMP_CLAUSE_USE_DEVICE_ADDR: 12355 case OMP_CLAUSE_IS_DEVICE_PTR: 12356 tree new_var; 12357 gimple_seq assign_body; 12358 bool is_array_data; 12359 bool do_optional_check; 12360 assign_body = NULL; 12361 do_optional_check = false; 12362 var = OMP_CLAUSE_DECL (c); 12363 is_array_data = lang_hooks.decls.omp_array_data (var, true) != NULL; 12364 12365 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR) 12366 x = build_sender_ref (is_array_data 12367 ? (splay_tree_key) &DECL_NAME (var) 12368 : (splay_tree_key) &DECL_UID (var), ctx); 12369 else 12370 x = build_receiver_ref (var, false, ctx); 12371 12372 if (is_array_data) 12373 { 12374 bool is_ref = omp_is_reference (var); 12375 do_optional_check = true; 12376 /* First, we copy the descriptor data from the host; then 12377 we update its data to point to the target address. */ 12378 new_var = lookup_decl (var, ctx); 12379 new_var = DECL_VALUE_EXPR (new_var); 12380 tree v = new_var; 12381 12382 if (is_ref) 12383 { 12384 var = build_fold_indirect_ref (var); 12385 gimplify_expr (&var, &assign_body, NULL, is_gimple_val, 12386 fb_rvalue); 12387 v = create_tmp_var_raw (TREE_TYPE (var), get_name (var)); 12388 gimple_add_tmp_var (v); 12389 TREE_ADDRESSABLE (v) = 1; 12390 gimple_seq_add_stmt (&assign_body, 12391 gimple_build_assign (v, var)); 12392 tree rhs = build_fold_addr_expr (v); 12393 gimple_seq_add_stmt (&assign_body, 12394 gimple_build_assign (new_var, rhs)); 12395 } 12396 else 12397 gimple_seq_add_stmt (&assign_body, 12398 gimple_build_assign (new_var, var)); 12399 12400 tree v2 = lang_hooks.decls.omp_array_data (unshare_expr (v), false); 12401 gcc_assert (v2); 12402 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue); 12403 gimple_seq_add_stmt (&assign_body, 12404 gimple_build_assign (v2, x)); 12405 } 12406 else if (is_variable_sized (var)) 12407 { 12408 tree pvar = DECL_VALUE_EXPR (var); 12409 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF); 12410 pvar = TREE_OPERAND (pvar, 0); 12411 gcc_assert (DECL_P (pvar)); 12412 new_var = lookup_decl (pvar, ctx); 12413 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue); 12414 gimple_seq_add_stmt (&assign_body, 12415 gimple_build_assign (new_var, x)); 12416 } 12417 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR 12418 && !omp_is_reference (var) 12419 && !omp_is_allocatable_or_ptr (var)) 12420 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE) 12421 { 12422 new_var = lookup_decl (var, ctx); 12423 new_var = DECL_VALUE_EXPR (new_var); 12424 gcc_assert (TREE_CODE (new_var) == MEM_REF); 12425 new_var = TREE_OPERAND (new_var, 0); 12426 gcc_assert (DECL_P (new_var)); 12427 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue); 12428 gimple_seq_add_stmt (&assign_body, 12429 gimple_build_assign (new_var, x)); 12430 } 12431 else 12432 { 12433 tree type = TREE_TYPE (var); 12434 new_var = lookup_decl (var, ctx); 12435 if (omp_is_reference (var)) 12436 { 12437 type = TREE_TYPE (type); 12438 if (TREE_CODE (type) != ARRAY_TYPE 12439 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR 12440 || (omp_is_reference (var) 12441 && omp_is_allocatable_or_ptr (var)))) 12442 { 12443 tree v = create_tmp_var_raw (type, get_name (var)); 12444 gimple_add_tmp_var (v); 12445 TREE_ADDRESSABLE (v) = 1; 12446 x = fold_convert 
(type, x); 12447 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, 12448 fb_rvalue); 12449 gimple_seq_add_stmt (&assign_body, 12450 gimple_build_assign (v, x)); 12451 x = build_fold_addr_expr (v); 12452 do_optional_check = true; 12453 } 12454 } 12455 new_var = DECL_VALUE_EXPR (new_var); 12456 x = fold_convert (TREE_TYPE (new_var), x); 12457 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue); 12458 gimple_seq_add_stmt (&assign_body, 12459 gimple_build_assign (new_var, x)); 12460 } 12461 tree present; 12462 present = (do_optional_check 12463 ? omp_check_optional_argument (OMP_CLAUSE_DECL (c), true) 12464 : NULL_TREE); 12465 if (present) 12466 { 12467 tree null_label = create_artificial_label (UNKNOWN_LOCATION); 12468 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION); 12469 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION); 12470 glabel *null_glabel = gimple_build_label (null_label); 12471 glabel *notnull_glabel = gimple_build_label (notnull_label); 12472 ggoto *opt_arg_ggoto = gimple_build_goto (opt_arg_label); 12473 gimplify_expr (&x, &new_body, NULL, is_gimple_val, 12474 fb_rvalue); 12475 gimplify_expr (&present, &new_body, NULL, is_gimple_val, 12476 fb_rvalue); 12477 gcond *cond = gimple_build_cond_from_tree (present, 12478 notnull_label, 12479 null_label); 12480 gimple_seq_add_stmt (&new_body, cond); 12481 gimple_seq_add_stmt (&new_body, null_glabel); 12482 gimplify_assign (new_var, null_pointer_node, &new_body); 12483 gimple_seq_add_stmt (&new_body, opt_arg_ggoto); 12484 gimple_seq_add_stmt (&new_body, notnull_glabel); 12485 gimple_seq_add_seq (&new_body, assign_body); 12486 gimple_seq_add_stmt (&new_body, 12487 gimple_build_label (opt_arg_label)); 12488 } 12489 else 12490 gimple_seq_add_seq (&new_body, assign_body); 12491 break; 12492 } 12493 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass, 12494 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed 12495 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs 12496 or references to VLAs. 
      /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in a second pass,
         so that firstprivate vars holding OMP_CLAUSE_SIZE, if needed,
         are already handled.  Similarly OMP_CLAUSE_PRIVATE for VLAs
         or references to VLAs.  */
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
        switch (OMP_CLAUSE_CODE (c))
          {
            tree var;
          default:
            break;
          case OMP_CLAUSE_MAP:
            if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
                || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
              {
                location_t clause_loc = OMP_CLAUSE_LOCATION (c);
                poly_int64 offset = 0;
                gcc_assert (prev);
                var = OMP_CLAUSE_DECL (c);
                if (DECL_P (var)
                    && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
                    && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
                                                                      ctx))
                    && varpool_node::get_create (var)->offloadable)
                  break;
                if (TREE_CODE (var) == INDIRECT_REF
                    && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
                  var = TREE_OPERAND (var, 0);
                if (TREE_CODE (var) == COMPONENT_REF)
                  {
                    var = get_addr_base_and_unit_offset (var, &offset);
                    gcc_assert (var != NULL_TREE && DECL_P (var));
                  }
                else if (DECL_SIZE (var)
                         && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
                  {
                    tree var2 = DECL_VALUE_EXPR (var);
                    gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
                    var2 = TREE_OPERAND (var2, 0);
                    gcc_assert (DECL_P (var2));
                    var = var2;
                  }
                tree new_var = lookup_decl (var, ctx), x;
                tree type = TREE_TYPE (new_var);
                bool is_ref;
                if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
                    && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
                        == COMPONENT_REF))
                  {
                    type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
                    is_ref = true;
                    new_var = build2 (MEM_REF, type,
                                      build_fold_addr_expr (new_var),
                                      build_int_cst (build_pointer_type (type),
                                                     offset));
                  }
                else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
                  {
                    type = TREE_TYPE (OMP_CLAUSE_DECL (c));
                    is_ref = TREE_CODE (type) == REFERENCE_TYPE;
                    new_var = build2 (MEM_REF, type,
                                      build_fold_addr_expr (new_var),
                                      build_int_cst (build_pointer_type (type),
                                                     offset));
                  }
                else
                  is_ref = omp_is_reference (var);
                if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
                  is_ref = false;
                bool ref_to_array = false;
                if (is_ref)
                  {
                    type = TREE_TYPE (type);
                    if (TREE_CODE (type) == ARRAY_TYPE)
                      {
                        type = build_pointer_type (type);
                        ref_to_array = true;
                      }
                  }
                else if (TREE_CODE (type) == ARRAY_TYPE)
                  {
                    tree decl2 = DECL_VALUE_EXPR (new_var);
                    gcc_assert (TREE_CODE (decl2) == MEM_REF);
                    decl2 = TREE_OPERAND (decl2, 0);
                    gcc_assert (DECL_P (decl2));
                    new_var = decl2;
                    type = TREE_TYPE (new_var);
                  }
                x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
                x = fold_convert_loc (clause_loc, type, x);
                if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
                  {
                    tree bias = OMP_CLAUSE_SIZE (c);
                    if (DECL_P (bias))
                      bias = lookup_decl (bias, ctx);
                    bias = fold_convert_loc (clause_loc, sizetype, bias);
                    bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
                                            bias);
                    x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
                                         TREE_TYPE (x), x, bias);
                  }
                if (ref_to_array)
                  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
                gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
                if (is_ref && !ref_to_array)
                  {
                    tree t = create_tmp_var_raw (type, get_name (var));
                    gimple_add_tmp_var (t);
                    TREE_ADDRESSABLE (t) = 1;
                    gimple_seq_add_stmt (&new_body,
                                         gimple_build_assign (t, x));
                    x = build_fold_addr_expr_loc (clause_loc, t);
                  }
                gimple_seq_add_stmt (&new_body,
                                     gimple_build_assign (new_var, x));
                prev = NULL_TREE;
              }
            else if (OMP_CLAUSE_CHAIN (c)
                     && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
                        == OMP_CLAUSE_MAP
                     && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
                         == GOMP_MAP_FIRSTPRIVATE_POINTER
                         || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
                             == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
              prev = c;
            break;
          case OMP_CLAUSE_PRIVATE:
            var = OMP_CLAUSE_DECL (c);
            if (is_variable_sized (var))
              {
                location_t clause_loc = OMP_CLAUSE_LOCATION (c);
                tree new_var = lookup_decl (var, ctx);
                tree pvar = DECL_VALUE_EXPR (var);
                gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
                pvar = TREE_OPERAND (pvar, 0);
                gcc_assert (DECL_P (pvar));
                tree new_pvar = lookup_decl (pvar, ctx);
                tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
                tree al = size_int (DECL_ALIGN (var));
                tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
                x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
                x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
                gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
                gimple_seq_add_stmt (&new_body,
                                     gimple_build_assign (new_pvar, x));
              }
            else if (omp_is_reference (var)
                     && !is_gimple_omp_oacc (ctx->stmt))
              {
                location_t clause_loc = OMP_CLAUSE_LOCATION (c);
                tree new_var = lookup_decl (var, ctx);
                tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
                if (TREE_CONSTANT (x))
                  break;
                else
                  {
                    tree atmp
                      = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
                    tree rtype = TREE_TYPE (TREE_TYPE (new_var));
                    tree al = size_int (TYPE_ALIGN (rtype));
                    x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
                  }

                x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
                gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
                gimple_seq_add_stmt (&new_body,
                                     gimple_build_assign (new_var, x));
              }
            break;
          }

      gimple_seq fork_seq = NULL;
      gimple_seq join_seq = NULL;

      if (offloaded && is_gimple_omp_oacc (ctx->stmt))
        {
          /* If there are reductions on the offloaded region itself, treat
             them as a dummy GANG loop.  */
          tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);

          lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
                                 false, NULL, NULL, &fork_seq, &join_seq,
                                 ctx);
        }

      gimple_seq_add_seq (&new_body, fork_seq);
      gimple_seq_add_seq (&new_body, tgt_body);
      gimple_seq_add_seq (&new_body, join_seq);

      if (offloaded)
        {
          new_body = maybe_catch_exception (new_body);
          gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
        }
      gimple_omp_set_body (stmt, new_body);
    }
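  /* Illustrative note, no additional functionality: for an OpenACC
     construct such as

         #pragma acc parallel reduction (+:sum)

     the fork_seq/join_seq built just above initialize and finalize the
     reduction around the offloaded body as if it were a gang-level
     loop, i.e. the GOMP_DIM_GANG "dummy GANG loop" treatment mentioned
     in the comment above.  */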
  bind = gimple_build_bind (NULL, NULL,
                            tgt_bind ? gimple_bind_block (tgt_bind)
                                     : NULL_TREE);
  gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
  gimple_bind_add_seq (bind, ilist);
  gimple_bind_add_stmt (bind, stmt);
  gimple_bind_add_seq (bind, olist);

  pop_gimplify_context (NULL);

  if (dep_bind)
    {
      gimple_bind_add_seq (dep_bind, dep_ilist);
      gimple_bind_add_stmt (dep_bind, bind);
      gimple_bind_add_seq (dep_bind, dep_olist);
      pop_gimplify_context (dep_bind);
    }
}

/* Expand code for an OpenMP teams directive.  */

static void
lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
  push_gimplify_context ();

  tree block = make_node (BLOCK);
  gbind *bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_seq bind_body = NULL;
  gimple_seq dlist = NULL;
  gimple_seq olist = NULL;

  tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
                                    OMP_CLAUSE_NUM_TEAMS);
  if (num_teams == NULL_TREE)
    num_teams = build_int_cst (unsigned_type_node, 0);
  else
    {
      num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
      num_teams = fold_convert (unsigned_type_node, num_teams);
      gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
    }
  tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
                                       OMP_CLAUSE_THREAD_LIMIT);
  if (thread_limit == NULL_TREE)
    thread_limit = build_int_cst (unsigned_type_node, 0);
  else
    {
      thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
      thread_limit = fold_convert (unsigned_type_node, thread_limit);
      gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
                     fb_rvalue);
    }

  lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
                           &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
  lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist,
                           NULL, ctx);
  if (!gimple_omp_teams_grid_phony (teams_stmt))
    {
      gimple_seq_add_stmt (&bind_body, teams_stmt);
      location_t loc = gimple_location (teams_stmt);
      tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
      gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
      gimple_set_location (call, loc);
      gimple_seq_add_stmt (&bind_body, call);
    }

  gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
  gimple_omp_set_body (teams_stmt, NULL);
  gimple_seq_add_seq (&bind_body, olist);
  gimple_seq_add_seq (&bind_body, dlist);
  if (!gimple_omp_teams_grid_phony (teams_stmt))
    gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}
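/* Illustrative sketch of the lowering above; the clause values are
   examples and the runtime call is shown in source form.  For

     #pragma omp teams num_teams (4) thread_limit (8)
     { body }

   the result is roughly

     <privatization code from lower_rec_input_clauses>
     __builtin_GOMP_teams (4, 8);
     body
     <reduction and cleanup code>

   wrapped in a GIMPLE_BIND; an omitted clause contributes the value 0,
   leaving the choice to the runtime.  */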
/* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct.  */

static void
lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
                       gimple_build_omp_return (false));
}


/* Callback for lower_omp_1.  Return non-NULL if *tp needs to be
   regimplified.  If DATA is non-NULL, lower_omp_1 is outside
   of OMP context, but with task_shared_vars set.  */

static tree
lower_omp_regimplify_p (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;

  /* Any variable with DECL_VALUE_EXPR needs to be regimplified.  */
  if ((VAR_P (t) || TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
      && data == NULL
      && DECL_HAS_VALUE_EXPR_P (t))
    return t;

  if (task_shared_vars
      && DECL_P (t)
      && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
    return t;

  /* If a global variable has been privatized, TREE_CONSTANT on
     ADDR_EXPR might be wrong.  */
  if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (t);

  *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
  return NULL_TREE;
}

/* Data to be communicated between lower_omp_regimplify_operands and
   lower_omp_regimplify_operands_p.  */

struct lower_omp_regimplify_operands_data
{
  omp_context *ctx;
  vec<tree> *decls;
};

/* Helper function for lower_omp_regimplify_operands.  Find
   omp_member_access_dummy_var vars and temporarily adjust their
   DECL_VALUE_EXPRs if needed.  */

static tree
lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees, void *data)
{
  tree t = omp_member_access_dummy_var (*tp);
  if (t)
    {
      struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
      lower_omp_regimplify_operands_data *ldata
        = (lower_omp_regimplify_operands_data *) wi->info;
      tree o = maybe_lookup_decl (t, ldata->ctx);
      if (o != t)
        {
          ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
          ldata->decls->safe_push (*tp);
          tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
          SET_DECL_VALUE_EXPR (*tp, v);
        }
    }
  *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
  return NULL_TREE;
}

/* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
   of omp_member_access_dummy_var vars during regimplification.  */

static void
lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
                               gimple_stmt_iterator *gsi_p)
{
  auto_vec<tree, 10> decls;
  if (ctx)
    {
      struct walk_stmt_info wi;
      memset (&wi, '\0', sizeof (wi));
      struct lower_omp_regimplify_operands_data data;
      data.ctx = ctx;
      data.decls = &decls;
      wi.info = &data;
      walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
    }
  gimple_regimplify_operands (stmt, gsi_p);
  while (!decls.is_empty ())
    {
      tree t = decls.pop ();
      tree v = decls.pop ();
      SET_DECL_VALUE_EXPR (t, v);
    }
}
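/* Illustrative example (variable and field names are hypothetical): if
   a variable "x" shared with a task has been remapped and given a
   DECL_VALUE_EXPR such as

     .omp_data_i->x

   then after substitution a statement like "if (x > 0) ..." no longer
   has a gimple-val condition operand, so lower_omp_regimplify_p flags
   it and lower_omp_regimplify_operands re-gimplifies the load through
   the received data structure into a separate statement feeding the
   condition.  */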
static void
lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  struct walk_stmt_info wi;
  gcall *call_stmt;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  if (task_shared_vars)
    memset (&wi, '\0', sizeof (wi));

  /* If we have issued syntax errors, avoid doing any heavy lifting.
     Just replace the OMP directives with a NOP to avoid
     confusing RTL expansion.  */
  if (seen_error () && is_gimple_omp (stmt))
    {
      gsi_replace (gsi_p, gimple_build_nop (), true);
      return;
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
        gcond *cond_stmt = as_a <gcond *> (stmt);
        if ((ctx || task_shared_vars)
            && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
                           lower_omp_regimplify_p,
                           ctx ? NULL : &wi, NULL)
                || walk_tree (gimple_cond_rhs_ptr (cond_stmt),
                              lower_omp_regimplify_p,
                              ctx ? NULL : &wi, NULL)))
          lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
      }
      break;
    case GIMPLE_CATCH:
      lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
      break;
    case GIMPLE_EH_FILTER:
      lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
      break;
    case GIMPLE_TRY:
      lower_omp (gimple_try_eval_ptr (stmt), ctx);
      lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
      break;
    case GIMPLE_TRANSACTION:
      lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
                 ctx);
      break;
    case GIMPLE_BIND:
      lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
      maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
      break;
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
        ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_taskreg (gsi_p, ctx);
      break;
    case GIMPLE_OMP_FOR:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
        ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_for (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SECTIONS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
        ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_sections (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SINGLE:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_single (gsi_p, ctx);
      break;
    case GIMPLE_OMP_MASTER:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_master (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TASKGROUP:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_taskgroup (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ORDERED:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_ordered (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SCAN:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_scan (gsi_p, ctx);
      break;
    case GIMPLE_OMP_CRITICAL:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_critical (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ATOMIC_LOAD:
      if ((ctx || task_shared_vars)
          && walk_tree (gimple_omp_atomic_load_rhs_ptr (
                          as_a <gomp_atomic_load *> (stmt)),
                        lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
        lower_omp_regimplify_operands (ctx, stmt, gsi_p);
      break;
    case GIMPLE_OMP_TARGET:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_target (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TEAMS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
        lower_omp_taskreg (gsi_p, ctx);
      else
        lower_omp_teams (gsi_p, ctx);
      break;
    case GIMPLE_OMP_GRID_BODY:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_grid_body (gsi_p, ctx);
      break;
    case GIMPLE_CALL:
      tree fndecl;
      call_stmt = as_a <gcall *> (stmt);
      fndecl = gimple_call_fndecl (call_stmt);
      if (fndecl
          && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
        switch (DECL_FUNCTION_CODE (fndecl))
          {
          case BUILT_IN_GOMP_BARRIER:
            if (ctx == NULL)
              break;
            /* FALLTHRU */
          case BUILT_IN_GOMP_CANCEL:
          case BUILT_IN_GOMP_CANCELLATION_POINT:
            omp_context *cctx;
            cctx = ctx;
            if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
              cctx = cctx->outer;
            gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
            if (!cctx->cancellable)
              {
                if (DECL_FUNCTION_CODE (fndecl)
                    == BUILT_IN_GOMP_CANCELLATION_POINT)
                  {
                    stmt = gimple_build_nop ();
                    gsi_replace (gsi_p, stmt, false);
                  }
                break;
              }
            if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
              {
                fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
                gimple_call_set_fndecl (call_stmt, fndecl);
                gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
              }
            tree lhs;
            lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
            gimple_call_set_lhs (call_stmt, lhs);
            tree fallthru_label;
            fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
            gimple *g;
            g = gimple_build_label (fallthru_label);
            gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
            g = gimple_build_cond (NE_EXPR, lhs,
                                   fold_convert (TREE_TYPE (lhs),
                                                 boolean_false_node),
                                   cctx->cancel_label, fallthru_label);
            gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
            break;
          default:
            break;
          }
      goto regimplify;

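    /* Illustrative sketch of the rewrite above ("D.1" and the label
       names are pseudo-GIMPLE placeholders): inside a cancellable
       region,

           GOMP_barrier ();

       becomes roughly

           D.1 = GOMP_barrier_cancel ();
           if (D.1 != 0) goto <cancel_label>; else goto <fallthru>;
         <fallthru>:

       so that a cancellation request observed at the barrier transfers
       control to the region's cancellation label.  */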
    case GIMPLE_ASSIGN:
      for (omp_context *up = ctx; up; up = up->outer)
        {
          if (gimple_code (up->stmt) == GIMPLE_OMP_ORDERED
              || gimple_code (up->stmt) == GIMPLE_OMP_CRITICAL
              || gimple_code (up->stmt) == GIMPLE_OMP_TASKGROUP
              || gimple_code (up->stmt) == GIMPLE_OMP_SECTION
              || gimple_code (up->stmt) == GIMPLE_OMP_SCAN
              || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
                  && (gimple_omp_target_kind (up->stmt)
                      == GF_OMP_TARGET_KIND_DATA)))
            continue;
          else if (!up->lastprivate_conditional_map)
            break;
          tree lhs = get_base_address (gimple_assign_lhs (stmt));
          if (TREE_CODE (lhs) == MEM_REF
              && DECL_P (TREE_OPERAND (lhs, 0))
              && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs, 0)))
                 == REFERENCE_TYPE)
            lhs = TREE_OPERAND (lhs, 0);
          if (DECL_P (lhs))
            if (tree *v = up->lastprivate_conditional_map->get (lhs))
              {
                tree clauses;
                if (up->combined_into_simd_safelen1)
                  {
                    up = up->outer;
                    if (gimple_code (up->stmt) == GIMPLE_OMP_SCAN)
                      up = up->outer;
                  }
                if (gimple_code (up->stmt) == GIMPLE_OMP_FOR)
                  clauses = gimple_omp_for_clauses (up->stmt);
                else
                  clauses = gimple_omp_sections_clauses (up->stmt);
                tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
                if (!OMP_CLAUSE__CONDTEMP__ITER (c))
                  c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
                                       OMP_CLAUSE__CONDTEMP_);
                gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c));
                gimple *g = gimple_build_assign (*v, OMP_CLAUSE_DECL (c));
                gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
              }
        }
      /* FALLTHRU */

    default:
    regimplify:
      if ((ctx || task_shared_vars)
          && walk_gimple_op (stmt, lower_omp_regimplify_p,
                             ctx ? NULL : &wi))
        {
          /* Just remove clobbers; this should happen only if we have
             "privatized" local addressable variables in SIMD regions.
             The clobber isn't needed in that case, and gimplifying the
             address of the ARRAY_REF into a pointer and creating a
             MEM_REF-based clobber would create worse code than we get
             with the clobber dropped.  */
          if (gimple_clobber_p (stmt))
            {
              gsi_replace (gsi_p, gimple_build_nop (), true);
              break;
            }
          lower_omp_regimplify_operands (ctx, stmt, gsi_p);
        }
      break;
    }
}

static void
lower_omp (gimple_seq *body, omp_context *ctx)
{
  location_t saved_location = input_location;
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
    lower_omp_1 (&gsi, ctx);
  /* During gimplification, we haven't folded statements inside offloading
     or taskreg regions (gimplify.c:maybe_fold_stmt); do that now.  */
  if (target_nesting_level || taskreg_nesting_level)
    for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
      fold_stmt (&gsi);
  input_location = saved_location;
}
/* Main entry point.  */

static unsigned int
execute_lower_omp (void)
{
  gimple_seq body;
  int i;
  omp_context *ctx;

  /* This pass always runs, to provide PROP_gimple_lomp.
     But often, there is nothing to do.  */
  if (flag_openacc == 0 && flag_openmp == 0 && flag_openmp_simd == 0)
    return 0;

  all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
                                 delete_omp_context);

  body = gimple_body (current_function_decl);

  if (hsa_gen_requested_p ())
    omp_grid_gridify_all_targets (&body);

  scan_omp (&body, NULL);
  gcc_assert (taskreg_nesting_level == 0);
  FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
    finish_taskreg_scan (ctx);
  taskreg_contexts.release ();

  if (all_contexts->root)
    {
      if (task_shared_vars)
        push_gimplify_context ();
      lower_omp (&body, NULL);
      if (task_shared_vars)
        pop_gimplify_context (NULL);
    }

  if (all_contexts)
    {
      splay_tree_delete (all_contexts);
      all_contexts = NULL;
    }
  BITMAP_FREE (task_shared_vars);
  BITMAP_FREE (global_nonaddressable_vars);

  /* If the current function is a method, remove the artificial dummy
     VAR_DECLs created for non-static data member privatization: they are
     needed neither for debug info nor anything else, have already been
     replaced everywhere in the IL, and cause problems with LTO.  */
  if (DECL_ARGUMENTS (current_function_decl)
      && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
      && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
          == POINTER_TYPE))
    remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));

  gomp_task *task_stmt;
  unsigned j;
  FOR_EACH_VEC_ELT (task_cpyfns, j, task_stmt)
    finalize_task_copyfn (task_stmt);
  task_cpyfns.release ();
  return 0;
}

namespace {

const pass_data pass_data_lower_omp =
{
  GIMPLE_PASS, /* type */
  "omplower", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_omp : public gimple_opt_pass
{
public:
  pass_lower_omp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_omp, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_lower_omp (); }

}; // class pass_lower_omp

} // anon namespace

gimple_opt_pass *
make_pass_lower_omp (gcc::context *ctxt)
{
  return new pass_lower_omp (ctxt);
}

/* The following is a utility to diagnose structured block violations.
   It is not part of the "omplower" pass, as that's invoked too late.  It
   should be invoked by the respective front ends after gimplification.  */

static splay_tree all_labels;

/* Check for mismatched contexts and generate an error if needed.  Return
   true if an error is detected.  */

static bool
diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
               gimple *branch_ctx, gimple *label_ctx)
{
  gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
  gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));

  if (label_ctx == branch_ctx)
    return false;

  const char *kind = NULL;

  if (flag_openacc)
    {
      if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
          || (label_ctx && is_gimple_omp_oacc (label_ctx)))
        {
          gcc_checking_assert (kind == NULL);
          kind = "OpenACC";
        }
    }
  if (kind == NULL)
    {
      gcc_checking_assert (flag_openmp || flag_openmp_simd);
      kind = "OpenMP";
    }

  /* Previously we kept track of the label's entire context in
     diagnose_sb_[12] so we could traverse it and issue a correct "exit" or
     "enter" error message upon a structured block violation.

     We built the context by building a list with tree_cons'ing, but there
     is no easy counterpart in gimple tuples.  It seems like far too much
     work for issuing exit/enter error messages.  If someone really misses
     the distinct error message... patches welcome.  */

#if 0
  /* Try to avoid confusing the user by producing an error message
     with correct "exit" or "enter" verbiage.  We prefer "exit"
     unless we can show that LABEL_CTX is nested within BRANCH_CTX.  */
  if (branch_ctx == NULL)
    exit_p = false;
  else
    {
      while (label_ctx)
        {
          if (TREE_VALUE (label_ctx) == branch_ctx)
            {
              exit_p = false;
              break;
            }
          label_ctx = TREE_CHAIN (label_ctx);
        }
    }

  if (exit_p)
    error ("invalid exit from %s structured block", kind);
  else
    error ("invalid entry to %s structured block", kind);
#endif

  /* If it's obvious we have an invalid entry, be specific about the
     error.  */
  if (branch_ctx == NULL)
    error ("invalid entry to %s structured block", kind);
  else
    {
      /* Otherwise, be vague and lazy, but efficient.  */
      error ("invalid branch to/from %s structured block", kind);
    }

  gsi_replace (gsi_p, gimple_build_nop (), false);
  return true;
}
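/* Illustrative example of the kind of violation these passes diagnose
   (a sketch, not part of the machinery):

     #pragma omp parallel
     {
       goto out;      <-- invalid: branches out of the structured block
     }
   out:;

   Pass 1 below records that "out" belongs to the function-body (NULL)
   context while the goto sits in the parallel context; pass 2 then calls
   diagnose_sb_0, which reports "invalid branch to/from OpenMP structured
   block".  */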
/* Pass 1: Create a minimal tree of structured blocks, and record
   where each label is found.  */

static tree
diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
               struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  gimple *inner_context;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      /* The minimal context here is just the current OMP construct.  */
      inner_context = stmt;
      wi->info = inner_context;
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      inner_context = stmt;
      wi->info = inner_context;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
         walk them.  */
      walk_gimple_seq (gimple_omp_for_pre_body (stmt),
                       diagnose_sb_1, NULL, wi);
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_LABEL:
      splay_tree_insert (all_labels,
                         (splay_tree_key) gimple_label_label (
                                            as_a <glabel *> (stmt)),
                         (splay_tree_value) context);
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Pass 2: Check each branch and see if its context differs from the
   destination label's context.  */

static tree
diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
               struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  splay_tree_node n;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      wi->info = stmt;
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL,
                           wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      wi->info = stmt;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
         walk them.  */
      walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
                           diagnose_sb_2, NULL, wi);
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL,
                           wi);
      wi->info = context;
      break;

    case GIMPLE_COND:
      {
        gcond *cond_stmt = as_a <gcond *> (stmt);
        tree lab = gimple_cond_true_label (cond_stmt);
        if (lab)
          {
            n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
            diagnose_sb_0 (gsi_p, context,
                           n ? (gimple *) n->value : NULL);
          }
        lab = gimple_cond_false_label (cond_stmt);
        if (lab)
          {
            n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
            diagnose_sb_0 (gsi_p, context,
                           n ? (gimple *) n->value : NULL);
          }
      }
      break;

    case GIMPLE_GOTO:
      {
        tree lab = gimple_goto_dest (stmt);
        if (TREE_CODE (lab) != LABEL_DECL)
          break;

        n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
        diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
      }
      break;
    case GIMPLE_SWITCH:
      {
        gswitch *switch_stmt = as_a <gswitch *> (stmt);
        unsigned int i;
        for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
          {
            tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
            n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
            if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
              break;
          }
      }
      break;

    case GIMPLE_RETURN:
      diagnose_sb_0 (gsi_p, context, NULL);
      break;

    default:
      break;
    }

  return NULL_TREE;
}

static unsigned int
diagnose_omp_structured_block_errors (void)
{
  struct walk_stmt_info wi;
  gimple_seq body = gimple_body (current_function_decl);

  all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);

  memset (&wi, 0, sizeof (wi));
  walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);

  memset (&wi, 0, sizeof (wi));
  wi.want_locations = true;
  walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);

  gimple_set_body (current_function_decl, body);

  splay_tree_delete (all_labels);
  all_labels = NULL;

  return 0;
}

namespace {

const pass_data pass_data_diagnose_omp_blocks =
{
  GIMPLE_PASS, /* type */
  "*diagnose_omp_blocks", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_diagnose_omp_blocks : public gimple_opt_pass
{
public:
  pass_diagnose_omp_blocks (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
  {
    return flag_openacc || flag_openmp || flag_openmp_simd;
  }
  virtual unsigned int execute (function *)
  {
    return diagnose_omp_structured_block_errors ();
  }

}; // class pass_diagnose_omp_blocks

} // anon namespace

gimple_opt_pass *
make_pass_diagnose_omp_blocks (gcc::context *ctxt)
{
  return new pass_diagnose_omp_blocks (ctxt);
}


#include "gt-omp-low.h"