/* C++-specific tree lowering bits; see also c-gimplify.c and gimple.c.

   Copyright (C) 2002-2020 Free Software Foundation, Inc.
   Contributed by Jason Merrill <jason@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "basic-block.h"
#include "cp-tree.h"
#include "gimple.h"
#include "predict.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "gimplify.h"
#include "c-family/c-ubsan.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "gcc-rich-location.h"
#include "memmodel.h"
#include "tm_p.h"
#include "output.h"
#include "file-prefix-map.h"
#include "cgraph.h"
#include "omp-general.h"

/* Forward declarations.  */

static tree cp_genericize_r (tree *, int *, void *);
static tree cp_fold_r (tree *, int *, void *);
static void cp_genericize_tree (tree*, bool);
static tree cp_fold (tree);

/* Local declarations.  */

enum bc_t { bc_break = 0, bc_continue = 1 };

/* Stack of labels which are targets for "break" or "continue",
   linked through TREE_CHAIN.  */
static tree bc_label[2];

/* Begin a scope which can be exited by a break or continue statement.  BC
   indicates which.

   Just creates a label with location LOCATION and pushes it into the current
   context.  */

static tree
begin_bc_block (enum bc_t bc, location_t location)
{
  tree label = create_artificial_label (location);
  DECL_CHAIN (label) = bc_label[bc];
  bc_label[bc] = label;
  if (bc == bc_break)
    LABEL_DECL_BREAK (label) = true;
  else
    LABEL_DECL_CONTINUE (label) = true;
  return label;
}

/* Finish a scope which can be exited by a break or continue statement.
   LABEL was returned from the most recent call to begin_bc_block.  BLOCK is
   an expression for the contents of the scope.

   If we saw a break (or continue) in the scope, append a LABEL_EXPR to
   BLOCK.  Otherwise, just forget the label.  */

static void
finish_bc_block (tree *block, enum bc_t bc, tree label)
{
  gcc_assert (label == bc_label[bc]);

  if (TREE_USED (label))
    append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label),
			      block);

  bc_label[bc] = DECL_CHAIN (label);
  DECL_CHAIN (label) = NULL_TREE;
}

/* Get the LABEL_EXPR to represent a break or continue statement
   in the current block scope.  BC indicates which.  */

static tree
get_bc_label (enum bc_t bc)
{
  tree label = bc_label[bc];

  /* Mark the label used for finish_bc_block.  */
  TREE_USED (label) = 1;
  return label;
}
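/* Illustrative note (added commentary, not part of the original source):
   how the label stack above is used.  For a loop such as

     while (cond)
       {
	 if (stop) break;
	 if (skip) continue;
	 work ();
       }

   genericize_cp_loop brackets the walk of the body with begin_bc_block
   calls for bc_break and bc_continue.  Each break/continue inside the
   body becomes a GOTO_EXPR to the corresponding label (marking it
   TREE_USED via get_bc_label), and finish_bc_block then emits a
   LABEL_EXPR only for labels that were actually jumped to.  */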
/* Genericize a TRY_BLOCK.  */

static void
genericize_try_block (tree *stmt_p)
{
  tree body = TRY_STMTS (*stmt_p);
  tree cleanup = TRY_HANDLERS (*stmt_p);

  *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup);
}

/* Genericize a HANDLER by converting to a CATCH_EXPR.  */

static void
genericize_catch_block (tree *stmt_p)
{
  tree type = HANDLER_TYPE (*stmt_p);
  tree body = HANDLER_BODY (*stmt_p);

  /* FIXME should the caught type go in TREE_TYPE?  */
  *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body);
}

/* A terser interface for building a representation of an exception
   specification.  */

static tree
build_gimple_eh_filter_tree (tree body, tree allowed, tree failure)
{
  tree t;

  /* FIXME should the allowed types go in TREE_TYPE?  */
  t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
  append_to_statement_list (failure, &EH_FILTER_FAILURE (t));

  t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
  append_to_statement_list (body, &TREE_OPERAND (t, 0));

  return t;
}

/* Genericize an EH_SPEC_BLOCK by converting it to a
   TRY_CATCH_EXPR/EH_FILTER_EXPR pair.  */

static void
genericize_eh_spec_block (tree *stmt_p)
{
  tree body = EH_SPEC_STMTS (*stmt_p);
  tree allowed = EH_SPEC_RAISES (*stmt_p);
  tree failure = build_call_n (call_unexpected_fn, 1, build_exc_ptr ());

  *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure);
  TREE_NO_WARNING (*stmt_p) = true;
  TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true;
}

/* Return the first non-compound statement in STMT.  */

tree
first_stmt (tree stmt)
{
  switch (TREE_CODE (stmt))
    {
    case STATEMENT_LIST:
      if (tree_statement_list_node *p = STATEMENT_LIST_HEAD (stmt))
	return first_stmt (p->stmt);
      return void_node;

    case BIND_EXPR:
      return first_stmt (BIND_EXPR_BODY (stmt));

    default:
      return stmt;
    }
}

/* Genericize an IF_STMT by turning it into a COND_EXPR.  */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  if (then_ && else_)
    {
      tree ft = first_stmt (then_);
      tree fe = first_stmt (else_);
      br_predictor pr;
      if (TREE_CODE (ft) == PREDICT_EXPR
	  && TREE_CODE (fe) == PREDICT_EXPR
	  && (pr = PREDICT_EXPR_PREDICTOR (ft)) == PREDICT_EXPR_PREDICTOR (fe)
	  && (pr == PRED_HOT_LABEL || pr == PRED_COLD_LABEL))
	{
	  gcc_rich_location richloc (EXPR_LOC_OR_LOC (ft, locus));
	  richloc.add_range (EXPR_LOC_OR_LOC (fe, locus));
	  warning_at (&richloc, OPT_Wattributes,
		      "both branches of %<if%> statement marked as %qs",
		      pr == PRED_HOT_LABEL ? "likely" : "unlikely");
	}
    }

  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  protected_set_expr_location_if_unset (stmt, locus);
  *stmt_p = stmt;
}
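/* Illustrative note (added commentary): the lowering performed by
   genericize_if_stmt.  Roughly,

     if (x) f (); else g ();   =>   COND_EXPR <x, f (), g ()>
     if (x) f ();              =>   COND_EXPR <x, f (), <empty>>
     if (1) f (); else g ();   =>   f ()

   where in the last case the dead arm is dropped because the condition
   is a constant and the discarded arm has no side effects.  */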
/* Build a generic representation of one of the C loop forms.  COND is the
   loop condition or NULL_TREE.  BODY is the (possibly compound) statement
   controlled by the loop.  INCR is the increment expression of a for-loop,
   or NULL_TREE.  COND_IS_FIRST indicates whether the condition is
   evaluated before the loop body as in while and for loops, or after the
   loop body as in do-while loops.  */

static void
genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
		    tree incr, bool cond_is_first, int *walk_subtrees,
		    void *data)
{
  tree blab, clab;
  tree exit = NULL;
  tree stmt_list = NULL;
  tree debug_begin = NULL;

  protected_set_expr_location_if_unset (incr, start_locus);

  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&incr, cp_genericize_r, data, NULL);

  blab = begin_bc_block (bc_break, start_locus);
  clab = begin_bc_block (bc_continue, start_locus);

  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  if (MAY_HAVE_DEBUG_MARKER_STMTS
      && (!cond || !integer_zerop (cond)))
    {
      debug_begin = build0 (DEBUG_BEGIN_STMT, void_type_node);
      SET_EXPR_LOCATION (debug_begin, cp_expr_loc_or_loc (cond, start_locus));
    }

  if (cond && TREE_CODE (cond) != INTEGER_CST)
    {
      /* If COND is constant, don't bother building an exit.  If it's false,
	 we won't build a loop.  If it's true, any exits are in the body.  */
      location_t cloc = cp_expr_loc_or_loc (cond, start_locus);
      exit = build1_loc (cloc, GOTO_EXPR, void_type_node,
			 get_bc_label (bc_break));
      exit = fold_build3_loc (cloc, COND_EXPR, void_type_node, cond,
			      build_empty_stmt (cloc), exit);
    }

  if (exit && cond_is_first)
    {
      append_to_statement_list (debug_begin, &stmt_list);
      debug_begin = NULL_TREE;
      append_to_statement_list (exit, &stmt_list);
    }
  append_to_statement_list (body, &stmt_list);
  finish_bc_block (&stmt_list, bc_continue, clab);
  if (incr)
    {
      if (MAY_HAVE_DEBUG_MARKER_STMTS)
	{
	  tree d = build0 (DEBUG_BEGIN_STMT, void_type_node);
	  SET_EXPR_LOCATION (d, cp_expr_loc_or_loc (incr, start_locus));
	  append_to_statement_list (d, &stmt_list);
	}
      append_to_statement_list (incr, &stmt_list);
    }
  append_to_statement_list (debug_begin, &stmt_list);
  if (exit && !cond_is_first)
    append_to_statement_list (exit, &stmt_list);

  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  tree loop;
  if (cond && integer_zerop (cond))
    {
      if (cond_is_first)
	loop = fold_build3_loc (start_locus, COND_EXPR,
				void_type_node, cond, stmt_list,
				build_empty_stmt (start_locus));
      else
	loop = stmt_list;
    }
  else
    {
      location_t loc = start_locus;
      if (!cond || integer_nonzerop (cond))
	loc = EXPR_LOCATION (expr_first (body));
      if (loc == UNKNOWN_LOCATION)
	loc = start_locus;
      loop = build1_loc (loc, LOOP_EXPR, void_type_node, stmt_list);
    }

  stmt_list = NULL;
  append_to_statement_list (loop, &stmt_list);
  finish_bc_block (&stmt_list, bc_break, blab);
  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  *stmt_p = stmt_list;
}
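/* Illustrative note (added commentary): the shapes genericize_cp_loop
   builds.  With COND_IS_FIRST (while/for loops), the exit test precedes
   the body:

     LOOP_EXPR
       COND_EXPR <cond, <empty>, goto break_label>
       body
       continue_label:
       incr
     break_label:

   For do-while (COND_IS_FIRST false), the same conditional exit is
   appended after the body instead, so the body runs at least once.  */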
/* Genericize a FOR_STMT node *STMT_P.  */

static void
genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree expr = NULL;
  tree loop;
  tree init = FOR_INIT_STMT (stmt);

  if (init)
    {
      cp_walk_tree (&init, cp_genericize_r, data, NULL);
      append_to_statement_list (init, &expr);
    }

  genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt),
		      FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data);
  append_to_statement_list (loop, &expr);
  if (expr == NULL_TREE)
    expr = loop;
  *stmt_p = expr;
}

/* Genericize a WHILE_STMT node *STMT_P.  */

static void
genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt),
		      WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data);
}

/* Genericize a DO_STMT node *STMT_P.  */

static void
genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt),
		      DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data);
}

/* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR.  */

static void
genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  tree break_block, body, cond, type;
  location_t stmt_locus = EXPR_LOCATION (stmt);

  body = SWITCH_STMT_BODY (stmt);
  if (!body)
    body = build_empty_stmt (stmt_locus);
  cond = SWITCH_STMT_COND (stmt);
  type = SWITCH_STMT_TYPE (stmt);

  cp_walk_tree (&cond, cp_genericize_r, data, NULL);

  break_block = begin_bc_block (bc_break, stmt_locus);

  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&type, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  if (TREE_USED (break_block))
    SWITCH_BREAK_LABEL_P (break_block) = 1;
  finish_bc_block (&body, bc_break, break_block);
  *stmt_p = build2_loc (stmt_locus, SWITCH_EXPR, type, cond, body);
  SWITCH_ALL_CASES_P (*stmt_p) = SWITCH_STMT_ALL_CASES_P (stmt);
  gcc_checking_assert (!SWITCH_STMT_NO_BREAK_P (stmt)
		       || !TREE_USED (break_block));
}

/* Genericize a CONTINUE_STMT node *STMT_P.  */

static void
genericize_continue_stmt (tree *stmt_p)
{
  tree stmt_list = NULL;
  tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN);
  tree label = get_bc_label (bc_continue);
  location_t location = EXPR_LOCATION (*stmt_p);
  tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label);
  append_to_statement_list_force (pred, &stmt_list);
  append_to_statement_list (jump, &stmt_list);
  *stmt_p = stmt_list;
}

/* Genericize a BREAK_STMT node *STMT_P.  */

static void
genericize_break_stmt (tree *stmt_p)
{
  tree label = get_bc_label (bc_break);
  location_t location = EXPR_LOCATION (*stmt_p);
  *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label);
}
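/* Illustrative note (added commentary): genericize_switch_stmt turns

     switch (c) { case 1: ...; break; default: ...; }

   into a SWITCH_EXPR whose body is the walked statement list, with the
   break label (if any break statement was seen, so TREE_USED is set)
   appended after the body and marked SWITCH_BREAK_LABEL_P.  */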
/* Genericize an OMP_FOR node *STMT_P.  */

static void
genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  location_t locus = EXPR_LOCATION (stmt);
  tree clab = begin_bc_block (bc_continue, locus);

  cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL);
  if (TREE_CODE (stmt) != OMP_TASKLOOP)
    cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL);
  cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab);
}

/* Hook into the middle of gimplifying an OMP_FOR node.  */

static enum gimplify_status
cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
{
  tree for_stmt = *expr_p;
  gimple_seq seq = NULL;

  /* Protect ourselves from recursion.  */
  if (OMP_FOR_GIMPLIFYING_P (for_stmt))
    return GS_UNHANDLED;
  OMP_FOR_GIMPLIFYING_P (for_stmt) = 1;

  gimplify_and_add (for_stmt, &seq);
  gimple_seq_add_seq (pre_p, seq);

  OMP_FOR_GIMPLIFYING_P (for_stmt) = 0;

  return GS_ALL_DONE;
}

/* Gimplify an EXPR_STMT node.  */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
	{
	  if (!IS_EMPTY_STMT (stmt)
	      && !VOID_TYPE_P (TREE_TYPE (stmt))
	      && !TREE_NO_WARNING (stmt))
	    warning (OPT_Wunused_value, "statement with no effect");
	}
      else
	warn_if_unused_value (stmt, input_location);
    }

  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}
/* Gimplify initialization from an AGGR_INIT_EXPR.  */

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  /* What about code that pulls out the temp and uses it elsewhere?  I
     think that such code never uses the TARGET_EXPR as an initializer.  If
     I'm wrong, we'll abort because the temp won't have any RTL.  In that
     case, I guess we'll need to replace references somehow.  */
  if (TREE_CODE (from) == TARGET_EXPR && TARGET_EXPR_INITIAL (from))
    from = TARGET_EXPR_INITIAL (from);

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR
	 and replace the slot operand with our target.

	 Should we add a target parm to gimplify_expr instead?  No, as in
	 this case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
	  || TREE_CODE (sub) == VEC_INIT_EXPR)
	{
	  if (TREE_CODE (sub) == AGGR_INIT_EXPR)
	    AGGR_INIT_EXPR_SLOT (sub) = to;
	  else
	    VEC_INIT_EXPR_SLOT (sub) = to;
	  *expr_p = from;

	  /* The initialization is now a side-effect, so the container can
	     become void.  */
	  if (from != sub)
	    TREE_TYPE (from) = void_type_node;
	}

      /* Handle aggregate NSDMI.  */
      replace_placeholders (sub, to);

      if (t == sub)
	break;
      else
	t = TREE_OPERAND (t, 1);
    }
}

/* Gimplify a MUST_NOT_THROW_EXPR.  */

static enum gimplify_status
gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree stmt = *expr_p;
  tree temp = voidify_wrapper_expr (stmt, NULL);
  tree body = TREE_OPERAND (stmt, 0);
  gimple_seq try_ = NULL;
  gimple_seq catch_ = NULL;
  gimple *mnt;

  gimplify_and_add (body, &try_);
  mnt = gimple_build_eh_must_not_throw (terminate_fn);
  gimple_seq_add_stmt_without_update (&catch_, mnt);
  mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH);

  gimple_seq_add_stmt_without_update (pre_p, mnt);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL;
  return GS_ALL_DONE;
}

/* Return TRUE if an operand (OP) of a given TYPE being copied is
   really just an empty class copy.

   Check that the operand has a simple form so that TARGET_EXPRs and
   non-empty CONSTRUCTORs get reduced properly, and we leave the
   return slot optimization alone because it isn't a copy.  */

bool
simple_empty_class_p (tree type, tree op, tree_code code)
{
  if (TREE_CODE (op) == COMPOUND_EXPR)
    return simple_empty_class_p (type, TREE_OPERAND (op, 1), code);
  if (SIMPLE_TARGET_EXPR_P (op)
      && TYPE_HAS_TRIVIAL_DESTRUCTOR (type))
    /* The TARGET_EXPR is itself a simple copy, look through it.  */
    return simple_empty_class_p (type, TARGET_EXPR_INITIAL (op), code);

  if (TREE_CODE (op) == PARM_DECL
      && TREE_ADDRESSABLE (TREE_TYPE (op)))
    {
      tree fn = DECL_CONTEXT (op);
      if (DECL_THUNK_P (fn)
	  || lambda_static_thunk_p (fn))
	/* In a thunk, we pass through invisible reference parms, so this
	   isn't actually a copy.  */
	return false;
    }

  return
    (TREE_CODE (op) == EMPTY_CLASS_EXPR
     || code == MODIFY_EXPR
     || is_gimple_lvalue (op)
     || INDIRECT_REF_P (op)
     || (TREE_CODE (op) == CONSTRUCTOR
	 && CONSTRUCTOR_NELTS (op) == 0)
     || (TREE_CODE (op) == CALL_EXPR
	 && !CALL_EXPR_RETURN_SLOT_OPT (op)))
    && !TREE_CLOBBER_P (op)
    && is_really_empty_class (type, /*ignore_vptr*/true);
}
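/* Illustrative note (added commentary): what simple_empty_class_p enables.
   For

     struct empty {};
     empty a, b;
     a = b;

   the copy has no effect on the object representation, so
   cp_gimplify_expr can drop the MODIFY_EXPR entirely (keeping only any
   side effects of the operands) instead of emitting a zero-size copy.  */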
/* Returns true if evaluating E as an lvalue has side-effects;
   specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't
   really have side-effects until there is a read or write through it.  */

static bool
lvalue_has_side_effects (tree e)
{
  if (!TREE_SIDE_EFFECTS (e))
    return false;
  while (handled_component_p (e))
    {
      if (TREE_CODE (e) == ARRAY_REF
	  && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1)))
	return true;
      e = TREE_OPERAND (e, 0);
    }
  if (DECL_P (e))
    /* Just naming a variable has no side-effects.  */
    return false;
  else if (INDIRECT_REF_P (e))
    /* Similarly, indirection has no side-effects.  */
    return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0));
  else
    /* For anything else, trust TREE_SIDE_EFFECTS.  */
    return TREE_SIDE_EFFECTS (e);
}

/* Gimplify *EXPR_P as rvalue into an expression that can't be modified
   by expressions with side-effects in other operands.  */

static enum gimplify_status
gimplify_to_rvalue (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
		    bool (*gimple_test_f) (tree))
{
  enum gimplify_status t
    = gimplify_expr (expr_p, pre_p, post_p, gimple_test_f, fb_rvalue);
  if (t == GS_ERROR)
    return GS_ERROR;
  else if (is_gimple_variable (*expr_p) && TREE_CODE (*expr_p) != SSA_NAME)
    *expr_p = get_initialized_tmp_var (*expr_p, pre_p);
  return t;
}

/* Like gimplify_arg, but if ORDERED is set (which should be set if any of
   the arguments this argument is sequenced before has TREE_SIDE_EFFECTS
   set), make sure expressions with is_gimple_reg_type type are gimplified
   into SSA_NAME or a fresh temporary, and for non-is_gimple_reg_type we
   don't optimize away TARGET_EXPRs.  */

static enum gimplify_status
cp_gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
		 bool ordered)
{
  enum gimplify_status t;
  if (ordered
      && !is_gimple_reg_type (TREE_TYPE (*arg_p))
      && TREE_CODE (*arg_p) == TARGET_EXPR)
    {
      /* gimplify_arg would strip away the TARGET_EXPR, but
	 that can mean we don't copy the argument and some following
	 argument with side-effect could modify it.  */
      protected_set_expr_location (*arg_p, call_location);
      return gimplify_expr (arg_p, pre_p, NULL, is_gimple_lvalue, fb_either);
    }
  else
    {
      t = gimplify_arg (arg_p, pre_p, call_location);
      if (t == GS_ERROR)
	return GS_ERROR;
      else if (ordered
	       && is_gimple_reg_type (TREE_TYPE (*arg_p))
	       && is_gimple_variable (*arg_p)
	       && TREE_CODE (*arg_p) != SSA_NAME
	       /* No need to force references into register, references
		  can't be modified.  */
	       && !TYPE_REF_P (TREE_TYPE (*arg_p))
	       /* And this can't be modified either.  */
	       && *arg_p != current_class_ptr)
	*arg_p = get_initialized_tmp_var (*arg_p, pre_p);
      return t;
    }
}
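/* Illustrative note (added commentary): why cp_gimplify_arg forces
   temporaries when ORDERED.  Under strong evaluation order, in

     f (v, g ());

   where g () might modify v, the value of v must be captured before
   g () runs; so v is gimplified into a fresh temporary (or SSA_NAME)
   rather than left as a bare variable reference that a later argument's
   side effects could clobber.  */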
/* Do C++-specific gimplification.  Args are as for gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  location_t loc = cp_expr_loc_or_input_loc (*expr_p);
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
	= STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
	location_t loc = input_location;
	tree init = VEC_INIT_EXPR_INIT (*expr_p);
	int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
	gcc_assert (EXPR_HAS_LOCATION (*expr_p));
	input_location = EXPR_LOCATION (*expr_p);
	*expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
				  init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
				  from_array,
				  tf_warning_or_error);
	hash_set<tree> pset;
	cp_walk_tree (expr_p, cp_fold_r, &pset, NULL);
	cp_genericize_tree (expr_p, false);
	copy_if_shared (expr_p);
	ret = GS_OK;
	input_location = loc;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
	 THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
	 LHS of an assignment might also be involved in the RHS, as in bug
	 25979.  */
    case INIT_EXPR:
      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
	return GS_OK;
      /* Fall through.  */
    case MODIFY_EXPR:
    modify_expr_case:
      {
	/* If the back end isn't clever enough to know that the lhs and rhs
	   types are the same, add an explicit conversion.  */
	tree op0 = TREE_OPERAND (*expr_p, 0);
	tree op1 = TREE_OPERAND (*expr_p, 1);

	if (!error_operand_p (op0)
	    && !error_operand_p (op1)
	    && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
		|| TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
	    && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
	  TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
					      TREE_TYPE (op0), op1);

	else if (simple_empty_class_p (TREE_TYPE (op0), op1, code))
	  {
	    while (TREE_CODE (op1) == TARGET_EXPR)
	      /* We're disconnecting the initializer from its target,
		 don't create a temporary.  */
	      op1 = TARGET_EXPR_INITIAL (op1);

	    /* Remove any copies of empty classes.  Also drop volatile
	       variables on the RHS to avoid infinite recursion from
	       gimplify_expr trying to load the value.  */
	    if (TREE_SIDE_EFFECTS (op1))
	      {
		if (TREE_THIS_VOLATILE (op1)
		    && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
		  op1 = build_fold_addr_expr (op1);

		gimplify_and_add (op1, pre_p);
	      }
	    gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
	    *expr_p = TREE_OPERAND (*expr_p, 0);
	    if (code == RETURN_EXPR && REFERENCE_CLASS_P (*expr_p))
	      /* Avoid 'return *<retval>;'  */
	      *expr_p = TREE_OPERAND (*expr_p, 0);
	  }
	/* P0145 says that the RHS is sequenced before the LHS.
	   gimplify_modify_expr gimplifies the RHS before the LHS, but that
	   isn't quite strong enough in two cases:

	   1) gimplify.c wants to leave a CALL_EXPR on the RHS, which would
	      mean it's evaluated after the LHS.

	   2) the value calculation of the RHS is also sequenced before the
	      LHS, so for scalar assignment we need to preevaluate if the
	      RHS could be affected by LHS side-effects even if it has no
	      side-effects of its own.  We don't need this for classes
	      because class assignment takes its RHS by reference.  */
	else if (flag_strong_eval_order > 1
		 && TREE_CODE (*expr_p) == MODIFY_EXPR
		 && lvalue_has_side_effects (op0)
		 && (TREE_CODE (op1) == CALL_EXPR
		     || (SCALAR_TYPE_P (TREE_TYPE (op1))
			 && !TREE_CONSTANT (op1))))
	  TREE_OPERAND (*expr_p, 1) = get_initialized_tmp_var (op1, pre_p);
      }
      ret = GS_OK;
      break;
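      /* Illustrative note (added commentary) on the preevaluation above:
	 under strong evaluation order (C++17), in

	   a[f ()] = g ();

	 the RHS g () must be evaluated before the LHS subscript f (), so
	 g () is forced into a temporary here rather than being left as a
	 CALL_EXPR for gimplify_modify_expr, which would evaluate it after
	 the LHS.  */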
    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
      gcc_unreachable ();

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_LOOP:
    case OMP_TASKLOOP:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
	tree arg = TREE_OPERAND (*expr_p, 0);
	tree type = TREE_TYPE (*expr_p);
	*expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
					    : arg;
	ret = GS_OK;
      }
      break;

    case CALL_EXPR:
      ret = GS_OK;
      if (flag_strong_eval_order == 2
	  && CALL_EXPR_FN (*expr_p)
	  && cp_get_callee_fndecl_nofold (*expr_p) == NULL_TREE)
	{
	  tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
	  enum gimplify_status t
	    = gimplify_to_rvalue (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
				  is_gimple_call_addr);
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	  /* GIMPLE considers most pointer conversion useless, but for
	     calls we actually care about the exact function pointer
	     type.  */
	  else if (TREE_TYPE (CALL_EXPR_FN (*expr_p)) != fnptrtype)
	    CALL_EXPR_FN (*expr_p)
	      = build1 (NOP_EXPR, fnptrtype, CALL_EXPR_FN (*expr_p));
	}
      if (!CALL_EXPR_FN (*expr_p))
	/* Internal function call.  */;
      else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
	{
	  /* This is a call to a (compound) assignment operator that used
	     the operator syntax; gimplify the RHS first.  */
	  gcc_assert (call_expr_nargs (*expr_p) == 2);
	  gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
	  enum gimplify_status t
	    = cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc,
			       TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, 0)));
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	}
      else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
	{
	  /* Leave the last argument for gimplify_call_expr, to avoid
	     problems with __builtin_va_arg_pack().  */
	  int nargs = call_expr_nargs (*expr_p) - 1;
	  int last_side_effects_arg = -1;
	  for (int i = nargs; i > 0; --i)
	    if (TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, i)))
	      {
		last_side_effects_arg = i;
		break;
	      }
	  for (int i = 0; i < nargs; ++i)
	    {
	      enum gimplify_status t
		= cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc,
				   i < last_side_effects_arg);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      else if (flag_strong_eval_order
	       && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
	{
	  /* If flag_strong_eval_order, evaluate the object argument
	     first.  */
	  tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
	  if (INDIRECT_TYPE_P (fntype))
	    fntype = TREE_TYPE (fntype);
	  if (TREE_CODE (fntype) == METHOD_TYPE)
	    {
	      int nargs = call_expr_nargs (*expr_p);
	      bool side_effects = false;
	      for (int i = 1; i < nargs; ++i)
		if (TREE_SIDE_EFFECTS (CALL_EXPR_ARG (*expr_p, i)))
		  {
		    side_effects = true;
		    break;
		  }
	      enum gimplify_status t
		= cp_gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc,
				   side_effects);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      if (ret != GS_ERROR)
	{
	  tree decl = cp_get_callee_fndecl_nofold (*expr_p);
	  if (decl
	      && fndecl_built_in_p (decl, CP_BUILT_IN_IS_CONSTANT_EVALUATED,
				    BUILT_IN_FRONTEND))
	    *expr_p = boolean_false_node;
	  else if (decl
		   && fndecl_built_in_p (decl, CP_BUILT_IN_SOURCE_LOCATION,
					 BUILT_IN_FRONTEND))
	    *expr_p = fold_builtin_source_location (EXPR_LOCATION (*expr_p));
	}
      break;
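      /* Illustrative note (added commentary) on the CALL_EXPR handling
	 above: for an assignment written with operator syntax, e.g.

	   s = t;   // calls S::operator= (const S &)

	 CALL_EXPR_REVERSE_ARGS is set and the RHS argument is gimplified
	 first; for an ordinary member call b->f (x) under strong
	 evaluation order, the object argument b is evaluated before x,
	 matching C++17 evaluation-order rules.  */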
    case TARGET_EXPR:
      /* A TARGET_EXPR that expresses direct-initialization should have been
	 elided by cp_gimplify_init_expr.  */
      gcc_checking_assert (!TARGET_EXPR_DIRECT_INIT_P (*expr_p));
      ret = GS_UNHANDLED;
      break;

    case PTRMEM_CST:
      *expr_p = cplus_expand_constant (*expr_p);
      if (TREE_CODE (*expr_p) == PTRMEM_CST)
	ret = GS_ERROR;
      else
	ret = GS_OK;
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (*expr_p, 0)
	  && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
	      || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
	{
	  expr_p = &TREE_OPERAND (*expr_p, 0);
	  /* Avoid going through the INIT_EXPR case, which can
	     degrade INIT_EXPRs into AGGR_INIT_EXPRs.  */
	  goto modify_expr_case;
	}
      /* Fall through.  */

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}

static inline bool
is_invisiref_parm (const_tree t)
{
  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
	  && DECL_BY_REFERENCE (t));
}

/* Return true if the UIDs in both int tree maps are equal.  */

bool
cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b)
{
  return (a->uid == b->uid);
}

/* Hash a UID in a cxx_int_tree_map.  */

unsigned int
cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item)
{
  return item->uid;
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* OpenMP context during genericization.  */

struct cp_genericize_omp_taskreg
{
  bool is_parallel;
  bool default_shared;
  struct cp_genericize_omp_taskreg *outer;
  splay_tree variables;
};
/* Return true if genericization should try to determine if
   DECL is firstprivate or shared within task regions.  */

static bool
omp_var_to_track (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (is_invisiref_parm (decl))
    type = TREE_TYPE (type);
  else if (TYPE_REF_P (type))
    type = TREE_TYPE (type);
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);
  if (type == error_mark_node || !CLASS_TYPE_P (type))
    return false;
  if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
    return false;
  if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return false;
  return true;
}

/* Note DECL use in OpenMP region OMP_CTX during genericization.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
					 (splay_tree_key) decl);
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      if (omp_ctx->outer)
	omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
	{
	  struct cp_genericize_omp_taskreg *octx;

	  for (octx = omp_ctx->outer; octx; octx = octx->outer)
	    {
	      n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
		{
		  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
		  break;
		}
	      if (octx->is_parallel)
		break;
	    }
	  if (octx == NULL
	      && (TREE_CODE (decl) == PARM_DECL
		  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
		      && DECL_CONTEXT (decl) == current_function_decl)))
	    flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
	  if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
	    {
	      /* DECL is implicitly determined firstprivate in
		 the current task construct.  Ensure copy ctor and
		 dtor are instantiated, because during gimplification
		 it will already be too late.  */
	      tree type = TREE_TYPE (decl);
	      if (is_invisiref_parm (decl))
		type = TREE_TYPE (type);
	      else if (TYPE_REF_P (type))
		type = TREE_TYPE (type);
	      while (TREE_CODE (type) == ARRAY_TYPE)
		type = TREE_TYPE (type);
	      get_copy_ctor (type, tf_none);
	      get_dtor (type, tf_none);
	    }
	}
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}

/* Genericization context.  */

struct cp_genericize_data
{
  hash_set<tree> *p_set;
  vec<tree> bind_expr_stack;
  struct cp_genericize_omp_taskreg *omp_ctx;
  tree try_block;
  bool no_sanitize_p;
  bool handle_invisiref_parm_p;
};
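/* Illustrative note (added commentary): why omp_cxx_notice_variable
   instantiates the copy ctor and dtor eagerly.  In

     S s;                  // S is a class type
     #pragma omp task      // no default clause
     use (s);

   s is implicitly determined firstprivate in the task, so each task needs
   its own copy of s; the copy constructor and destructor of S must be
   instantiated during genericization, since by gimplification time it is
   too late.  */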
/* Perform any pre-gimplification folding of C++ front end trees to
   GENERIC.
   Note: the folding of non-OMP cases is something to move into the
   middle end.  For now, since we have most foldings only on GENERIC in
   fold-const, we need to perform this before the transformation to
   GIMPLE form.  */

static tree
cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt;
  enum tree_code code;

  *stmt_p = stmt = cp_fold (*stmt_p);

  if (((hash_set<tree> *) data)->add (stmt))
    {
      /* Don't walk subtrees of stmts we've already walked once, otherwise
	 we can have exponential complexity with e.g. lots of nested
	 SAVE_EXPRs or TARGET_EXPRs.  cp_fold uses a cache and will always
	 return the same tree, whose subtrees were already walked the
	 first time cp_fold_r was called on it.  */
      *walk_subtrees = 0;
      return NULL;
    }

  code = TREE_CODE (stmt);
  if (code == OMP_FOR || code == OMP_SIMD || code == OMP_DISTRIBUTE
      || code == OMP_LOOP || code == OMP_TASKLOOP || code == OACC_LOOP)
    {
      tree x;
      int i, n;

      cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
      x = OMP_FOR_COND (stmt);
      if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
	{
	  cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
	  cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
	}
      else if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
		cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
	    }
	}
      x = OMP_FOR_INCR (stmt);
      if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE (o) == MODIFY_EXPR)
		o = TREE_OPERAND (o, 1);
	      if (o && (TREE_CODE (o) == PLUS_EXPR
			|| TREE_CODE (o) == MINUS_EXPR
			|| TREE_CODE (o) == POINTER_PLUS_EXPR))
		{
		  cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
		  cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
		}
	    }
	}
      cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
      *walk_subtrees = 0;
    }

  return NULL;
}

/* Fold ALL the trees!  FIXME we should be able to remove this, but
   apparently that still causes optimization regressions.  */

void
cp_fold_function (tree fndecl)
{
  hash_set<tree> pset;
  cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &pset, NULL);
}

/* Turn SPACESHIP_EXPR EXPR into GENERIC.  */

static tree
genericize_spaceship (tree expr)
{
  iloc_sentinel s (cp_expr_location (expr));
  tree type = TREE_TYPE (expr);
  tree op0 = TREE_OPERAND (expr, 0);
  tree op1 = TREE_OPERAND (expr, 1);
  return genericize_spaceship (type, op0, op1);
}

/* If EXPR involves an anonymous VLA type, prepend a DECL_EXPR for that type
   to trigger gimplify_type_sizes; otherwise a cast to pointer-to-VLA confuses
   the middle-end (c++/88256).  */

static tree
predeclare_vla (tree expr)
{
  tree type = TREE_TYPE (expr);
  if (type == error_mark_node)
    return expr;

  /* We need to strip pointers for gimplify_type_sizes.  */
  tree vla = type;
  while (POINTER_TYPE_P (vla))
    {
      if (TYPE_NAME (vla))
	return expr;
      vla = TREE_TYPE (vla);
    }
  if (TYPE_NAME (vla) || !variably_modified_type_p (vla, NULL_TREE))
    return expr;

  tree decl = build_decl (input_location, TYPE_DECL, NULL_TREE, vla);
  DECL_ARTIFICIAL (decl) = 1;
  TYPE_NAME (vla) = decl;
  tree dexp = build_stmt (input_location, DECL_EXPR, decl);
  expr = build2 (COMPOUND_EXPR, type, dexp, expr);
  return expr;
}
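/* Illustrative note (added commentary): the case predeclare_vla handles
   (c++/88256).  Given

     void f (void *p, int n)
     {
       auto q = (int (*)[n]) p;   // cast to pointer-to-VLA
     }

   the anonymous VLA type 'int [n]' would otherwise never be seen by
   gimplify_type_sizes; prepending a DECL_EXPR for an artificial TYPE_DECL
   naming it lets the middle end compute the type's size expression.  */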
/* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.  */

static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  hash_set<tree> *p_set = wtd->p_set;

  /* If in an OpenMP context, note var uses.  */
  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
      && (VAR_P (stmt)
	  || TREE_CODE (stmt) == PARM_DECL
	  || TREE_CODE (stmt) == RESULT_DECL)
      && omp_var_to_track (stmt))
    omp_cxx_notice_variable (wtd->omp_ctx, stmt);

  /* Don't dereference parms in a thunk, pass the references through.  */
  if ((TREE_CODE (stmt) == CALL_EXPR && CALL_FROM_THUNK_P (stmt))
      || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
    {
      *walk_subtrees = 0;
      return NULL;
    }

  /* Dereference invisible reference parms.  */
  if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt))
    {
      *stmt_p = convert_from_reference (stmt);
      p_set->add (*stmt_p);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && VAR_OR_FUNCTION_DECL_P (stmt)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid);
      if (h)
	{
	  *stmt_p = h->to;
	  TREE_USED (h->to) |= TREE_USED (stmt);
	  *walk_subtrees = 0;
	  return NULL;
	}
    }

  if (TREE_CODE (stmt) == INTEGER_CST
      && TYPE_REF_P (TREE_TYPE (stmt))
      && (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
      && !wtd->no_sanitize_p)
    {
      ubsan_maybe_instrument_reference (stmt_p);
      if (*stmt_p != stmt)
	{
	  *walk_subtrees = 0;
	  return NULL_TREE;
	}
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (p_set->contains (stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  switch (TREE_CODE (stmt))
    {
    case ADDR_EXPR:
      if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
	{
	  /* If in an OpenMP context, note var uses.  */
	  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
	      && omp_var_to_track (TREE_OPERAND (stmt, 0)))
	    omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
	  *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
	  *walk_subtrees = 0;
	}
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (stmt, 0) && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
	/* Don't dereference an invisiref RESULT_DECL inside a
	   RETURN_EXPR.  */
	*walk_subtrees = 0;
      break;

    case OMP_CLAUSE:
      switch (OMP_CLAUSE_CODE (stmt))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    {
	      *walk_subtrees = 0;
	      if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
		cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
			      cp_genericize_r, data, NULL);
	    }
	  break;
	case OMP_CLAUSE_PRIVATE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    *walk_subtrees = 0;
	  else if (wtd->omp_ctx != NULL)
	    {
	      /* Private clause doesn't cause any references to the
		 var in outer contexts, avoid calling
		 omp_cxx_notice_variable for it.  */
	      struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
	      wtd->omp_ctx = NULL;
	      cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
			    data, NULL);
	      wtd->omp_ctx = old;
	      *walk_subtrees = 0;
	    }
	  break;
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_INCLUSIVE:
	case OMP_CLAUSE_EXCLUSIVE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    *walk_subtrees = 0;
	  break;
	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE_TASK_REDUCTION:
	  /* Don't dereference an invisiref in reduction clause's
	     OMP_CLAUSE_DECL either.  OMP_CLAUSE_REDUCTION_{INIT,MERGE}
	     still needs to be genericized.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    {
	      *walk_subtrees = 0;
	      if (OMP_CLAUSE_REDUCTION_INIT (stmt))
		cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
			      cp_genericize_r, data, NULL);
	      if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
		cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
			      cp_genericize_r, data, NULL);
	    }
	  break;
	default:
	  break;
	}
      break;

      /* Due to the way voidify_wrapper_expr is written, we don't get a
	 chance to lower this construct before scanning it, so we need to
	 lower these before doing anything else.  */
    case CLEANUP_STMT:
      *stmt_p = build2_loc (EXPR_LOCATION (stmt),
			    CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
						   : TRY_FINALLY_EXPR,
			    void_type_node,
			    CLEANUP_BODY (stmt),
			    CLEANUP_EXPR (stmt));
      break;

    case IF_STMT:
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);

      /* COND_EXPR might have incompatible types in branches if one or both
	 arms are bitfields.  Fix it up now.  */
    case COND_EXPR:
      {
	tree type_left
	  = (TREE_OPERAND (stmt, 1)
	     ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
	     : NULL_TREE);
	tree type_right
	  = (TREE_OPERAND (stmt, 2)
	     ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
	     : NULL_TREE);
	if (type_left
	    && !useless_type_conversion_p (TREE_TYPE (stmt),
					   TREE_TYPE (TREE_OPERAND (stmt, 1))))
	  {
	    TREE_OPERAND (stmt, 1)
	      = fold_convert (type_left, TREE_OPERAND (stmt, 1));
	    gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						   type_left));
	  }
	if (type_right
	    && !useless_type_conversion_p (TREE_TYPE (stmt),
					   TREE_TYPE (TREE_OPERAND (stmt, 2))))
	  {
	    TREE_OPERAND (stmt, 2)
	      = fold_convert (type_right, TREE_OPERAND (stmt, 2));
	    gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						   type_right));
	  }
      }
      break;
    case BIND_EXPR:
      if (__builtin_expect (wtd->omp_ctx != NULL, 0))
	{
	  tree decl;
	  for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
	    if (VAR_P (decl)
		&& !DECL_EXTERNAL (decl)
		&& omp_var_to_track (decl))
	      {
		splay_tree_node n
		  = splay_tree_lookup (wtd->omp_ctx->variables,
				       (splay_tree_key) decl);
		if (n == NULL)
		  splay_tree_insert (wtd->omp_ctx->variables,
				     (splay_tree_key) decl,
				     TREE_STATIC (decl)
				     ? OMP_CLAUSE_DEFAULT_SHARED
				     : OMP_CLAUSE_DEFAULT_PRIVATE);
	      }
	}
      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT
			    | SANITIZE_VPTR))
	{
	  /* The point here is to not sanitize static initializers.  */
	  bool no_sanitize_p = wtd->no_sanitize_p;
	  wtd->no_sanitize_p = true;
	  for (tree decl = BIND_EXPR_VARS (stmt);
	       decl;
	       decl = DECL_CHAIN (decl))
	    if (VAR_P (decl)
		&& TREE_STATIC (decl)
		&& DECL_INITIAL (decl))
	      cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
	  wtd->no_sanitize_p = no_sanitize_p;
	}
      wtd->bind_expr_stack.safe_push (stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
		    cp_genericize_r, data, NULL);
      wtd->bind_expr_stack.pop ();
      break;

    case USING_STMT:
      {
	tree block = NULL_TREE;

	/* Get the innermost enclosing GIMPLE_BIND that has a non-NULL
	   BLOCK, and append an IMPORTED_DECL to its
	   BLOCK_VARS chained list.  */
	if (wtd->bind_expr_stack.exists ())
	  {
	    int i;
	    for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
	      if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
		break;
	  }
	if (block)
	  {
	    tree decl = TREE_OPERAND (stmt, 0);
	    gcc_assert (decl);

	    if (undeduced_auto_decl (decl))
	      /* Omit from the GENERIC, the back-end can't handle it.  */;
	    else
	      {
		tree using_directive = make_node (IMPORTED_DECL);
		TREE_TYPE (using_directive) = void_type_node;

		IMPORTED_DECL_ASSOCIATED_DECL (using_directive) = decl;
		DECL_CHAIN (using_directive) = BLOCK_VARS (block);
		BLOCK_VARS (block) = using_directive;
	      }
	  }
	/* The USING_STMT won't appear in GENERIC.  */
	*stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
	*walk_subtrees = 0;
      }
      break;

    case DECL_EXPR:
      if (TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
	{
	  /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
	  *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
	  *walk_subtrees = 0;
	}
      else
	{
	  tree d = DECL_EXPR_DECL (stmt);
	  if (VAR_P (d))
	    gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
	}
      break;
    case OMP_PARALLEL:
    case OMP_TASK:
    case OMP_TASKLOOP:
      {
	struct cp_genericize_omp_taskreg omp_ctx;
	tree c, decl;
	splay_tree_node n;

	*walk_subtrees = 0;
	cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
	omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
	omp_ctx.default_shared = omp_ctx.is_parallel;
	omp_ctx.outer = wtd->omp_ctx;
	omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
	wtd->omp_ctx = &omp_ctx;
	for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	  switch (OMP_CLAUSE_CODE (c))
	    {
	    case OMP_CLAUSE_SHARED:
	    case OMP_CLAUSE_PRIVATE:
	    case OMP_CLAUSE_FIRSTPRIVATE:
	    case OMP_CLAUSE_LASTPRIVATE:
	      decl = OMP_CLAUSE_DECL (c);
	      if (decl == error_mark_node || !omp_var_to_track (decl))
		break;
	      n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
	      if (n != NULL)
		break;
	      splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
				 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
				 ? OMP_CLAUSE_DEFAULT_SHARED
				 : OMP_CLAUSE_DEFAULT_PRIVATE);
	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE && omp_ctx.outer)
		omp_cxx_notice_variable (omp_ctx.outer, decl);
	      break;
	    case OMP_CLAUSE_DEFAULT:
	      if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
		omp_ctx.default_shared = true;
	    default:
	      break;
	    }
	if (TREE_CODE (stmt) == OMP_TASKLOOP)
	  genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
	else
	  cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
	wtd->omp_ctx = omp_ctx.outer;
	splay_tree_delete (omp_ctx.variables);
      }
      break;

    case TRY_BLOCK:
      {
	*walk_subtrees = 0;
	tree try_block = wtd->try_block;
	wtd->try_block = stmt;
	cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
	wtd->try_block = try_block;
	cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
      }
      break;

    case MUST_NOT_THROW_EXPR:
      /* MUST_NOT_THROW_COND might be something else with TM.  */
      if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
	{
	  *walk_subtrees = 0;
	  tree try_block = wtd->try_block;
	  wtd->try_block = stmt;
	  cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
	  wtd->try_block = try_block;
	}
      break;

    case THROW_EXPR:
      {
	location_t loc = location_of (stmt);
	if (TREE_NO_WARNING (stmt))
	  /* Never mind.  */;
	else if (wtd->try_block)
	  {
	    if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR)
	      {
		auto_diagnostic_group d;
		if (warning_at (loc, OPT_Wterminate,
				"%<throw%> will always call %<terminate%>")
		    && cxx_dialect >= cxx11
		    && DECL_DESTRUCTOR_P (current_function_decl))
		  inform (loc, "in C++11 destructors default to %<noexcept%>");
	      }
	  }
	else
	  {
	    if (warn_cxx11_compat && cxx_dialect < cxx11
		&& DECL_DESTRUCTOR_P (current_function_decl)
		&& (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
		    == NULL_TREE)
		&& (get_defaulted_eh_spec (current_function_decl)
		    == empty_except_spec))
	      warning_at (loc, OPT_Wc__11_compat,
			  "in C++11 this %<throw%> will call %<terminate%> "
			  "because destructors default to %<noexcept%>");
	  }
      }
      break;

    case CONVERT_EXPR:
      gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
      break;

    case FOR_STMT:
      genericize_for_stmt (stmt_p, walk_subtrees, data);
      break;

    case WHILE_STMT:
      genericize_while_stmt (stmt_p, walk_subtrees, data);
      break;

    case DO_STMT:
      genericize_do_stmt (stmt_p, walk_subtrees, data);
      break;

    case SWITCH_STMT:
      genericize_switch_stmt (stmt_p, walk_subtrees, data);
      break;

    case CONTINUE_STMT:
      genericize_continue_stmt (stmt_p);
      break;

    case BREAK_STMT:
      genericize_break_stmt (stmt_p);
      break;

    case SPACESHIP_EXPR:
      *stmt_p = genericize_spaceship (*stmt_p);
      break;
    case OMP_DISTRIBUTE:
      /* Need to explicitly instantiate copy ctors on class iterators of
	 composite distribute parallel for.  */
      if (OMP_FOR_INIT (*stmt_p) == NULL_TREE)
	{
	  tree *data[4] = { NULL, NULL, NULL, NULL };
	  tree inner = walk_tree (&OMP_FOR_BODY (*stmt_p),
				  find_combined_omp_for, data, NULL);
	  if (inner != NULL_TREE
	      && TREE_CODE (inner) == OMP_FOR)
	    {
	      for (int i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner)); i++)
		if (OMP_FOR_ORIG_DECLS (inner)
		    && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner),
						i)) == TREE_LIST
		    && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner),
						   i)))
		  {
		    tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner), i);
		    /* Class iterators aren't allowed on OMP_SIMD, so the
		       only case we need to solve is distribute parallel
		       for.  */
		    gcc_assert (TREE_CODE (inner) == OMP_FOR
				&& data[1]);
		    tree orig_decl = TREE_PURPOSE (orig);
		    tree c, cl = NULL_TREE;
		    for (c = OMP_FOR_CLAUSES (inner);
			 c; c = OMP_CLAUSE_CHAIN (c))
		      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
			  && OMP_CLAUSE_DECL (c) == orig_decl)
			{
			  cl = c;
			  break;
			}
		    if (cl == NULL_TREE)
		      {
			for (c = OMP_PARALLEL_CLAUSES (*data[1]);
			     c; c = OMP_CLAUSE_CHAIN (c))
			  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			      && OMP_CLAUSE_DECL (c) == orig_decl)
			    {
			      cl = c;
			      break;
			    }
		      }
		    if (cl)
		      {
			orig_decl = require_complete_type (orig_decl);
			tree inner_type = TREE_TYPE (orig_decl);
			if (orig_decl == error_mark_node)
			  continue;
			if (TYPE_REF_P (TREE_TYPE (orig_decl)))
			  inner_type = TREE_TYPE (inner_type);

			while (TREE_CODE (inner_type) == ARRAY_TYPE)
			  inner_type = TREE_TYPE (inner_type);
			get_copy_ctor (inner_type, tf_warning_or_error);
		      }
		  }
	    }
	}
      /* FALLTHRU */
    case OMP_FOR:
    case OMP_SIMD:
    case OMP_LOOP:
    case OACC_LOOP:
      genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
      break;

    case PTRMEM_CST:
      /* By the time we get here we're handing off to the back end, so we
	 don't need or want to preserve PTRMEM_CST anymore.  */
      *stmt_p = cplus_expand_constant (stmt);
      *walk_subtrees = 0;
      break;

    case MEM_REF:
      /* For MEM_REF, make sure not to sanitize the second operand even
	 if it has reference type.  It is just an offset with a type
	 holding other information.  There is no other processing we
	 need to do for INTEGER_CSTs, so just ignore the second argument
	 unconditionally.  */
      cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
      *walk_subtrees = 0;
      break;

    case NOP_EXPR:
      *stmt_p = predeclare_vla (*stmt_p);
      if (!wtd->no_sanitize_p
	  && sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT)
	  && TYPE_REF_P (TREE_TYPE (stmt)))
	ubsan_maybe_instrument_reference (stmt_p);
      break;
    case CALL_EXPR:
      /* Evaluate function concept checks instead of treating them as
	 normal functions.  */
      if (concept_check_p (stmt))
	{
	  *stmt_p = evaluate_concept_check (stmt, tf_warning_or_error);
	  *walk_subtrees = 0;
	  break;
	}

      if (!wtd->no_sanitize_p
	  && sanitize_flags_p ((SANITIZE_NULL
				| SANITIZE_ALIGNMENT | SANITIZE_VPTR)))
	{
	  tree fn = CALL_EXPR_FN (stmt);
	  if (fn != NULL_TREE
	      && !error_operand_p (fn)
	      && INDIRECT_TYPE_P (TREE_TYPE (fn))
	      && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
	    {
	      bool is_ctor
		= TREE_CODE (fn) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
		  && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
	      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT))
		ubsan_maybe_instrument_member_call (stmt, is_ctor);
	      if (sanitize_flags_p (SANITIZE_VPTR) && !is_ctor)
		cp_ubsan_maybe_instrument_member_call (stmt);
	    }
	  else if (fn == NULL_TREE
		   && CALL_EXPR_IFN (stmt) == IFN_UBSAN_NULL
		   && TREE_CODE (CALL_EXPR_ARG (stmt, 0)) == INTEGER_CST
		   && TYPE_REF_P (TREE_TYPE (CALL_EXPR_ARG (stmt, 0))))
	    *walk_subtrees = 0;
	}
      /* Fall through.  */
    case AGGR_INIT_EXPR:
      /* For calls to a multi-versioned function, overload resolution
	 returns the function with the highest target priority, that is,
	 the version that will be checked for dispatching first.  If this
	 version is inlinable, a direct call to this version can be made;
	 otherwise the call should go through the dispatcher.  */
      {
	tree fn = cp_get_callee_fndecl_nofold (stmt);
	if (fn && DECL_FUNCTION_VERSIONED (fn)
	    && (current_function_decl == NULL
		|| !targetm.target_option.can_inline_p (current_function_decl,
							fn)))
	  if (tree dis = get_function_version_dispatcher (fn))
	    {
	      mark_versions_used (dis);
	      dis = build_address (dis);
	      if (TREE_CODE (stmt) == CALL_EXPR)
		CALL_EXPR_FN (stmt) = dis;
	      else
		AGGR_INIT_EXPR_FN (stmt) = dis;
	    }
      }
      break;

    case TARGET_EXPR:
      if (TARGET_EXPR_INITIAL (stmt)
	  && TREE_CODE (TARGET_EXPR_INITIAL (stmt)) == CONSTRUCTOR
	  && CONSTRUCTOR_PLACEHOLDER_BOUNDARY (TARGET_EXPR_INITIAL (stmt)))
	TARGET_EXPR_NO_ELIDE (stmt) = 1;
      break;

    case REQUIRES_EXPR:
      /* Emit the value of the requires-expression.  */
      *stmt_p = constant_boolean_node (constraints_satisfied_p (stmt),
				       boolean_type_node);
      *walk_subtrees = 0;
      break;

    case TEMPLATE_ID_EXPR:
      gcc_assert (concept_check_p (stmt));
      /* Emit the value of the concept check.  */
      *stmt_p = evaluate_concept_check (stmt, tf_warning_or_error);
      *walk_subtrees = 0;
      break;
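      /* Illustrative note (added commentary) on the two cases above: a
	 requires-expression or concept-id that survives to genericization
	 is already fully determined, so e.g.

	   requires (T t) { t + t; }   // with T = int

	 is replaced by boolean_true_node (or boolean_false_node), and a
	 concept check such as C<int> is likewise folded to its constant
	 value.  */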
    case STATEMENT_LIST:
      if (TREE_SIDE_EFFECTS (stmt))
	{
	  tree_stmt_iterator i;
	  int nondebug_stmts = 0;
	  bool clear_side_effects = true;
	  /* Genericization can clear TREE_SIDE_EFFECTS, e.g. when
	     transforming an IF_STMT into a COND_EXPR.  If such a stmt
	     appears in a STATEMENT_LIST that contains only that stmt and
	     some DEBUG_BEGIN_STMTs, then without -g the STATEMENT_LIST
	     wouldn't be present at all and the resulting expression
	     wouldn't have TREE_SIDE_EFFECTS set, so make sure to clear
	     the flag on the STATEMENT_LIST too in such cases.  */
	  for (i = tsi_start (stmt); !tsi_end_p (i); tsi_next (&i))
	    {
	      tree t = tsi_stmt (i);
	      if (TREE_CODE (t) != DEBUG_BEGIN_STMT && nondebug_stmts < 2)
		nondebug_stmts++;
	      cp_walk_tree (tsi_stmt_ptr (i), cp_genericize_r, data, NULL);
	      if (TREE_CODE (t) != DEBUG_BEGIN_STMT
		  && (nondebug_stmts > 1
		      || TREE_SIDE_EFFECTS (tsi_stmt (i))))
		clear_side_effects = false;
	    }
	  if (clear_side_effects)
	    TREE_SIDE_EFFECTS (stmt) = 0;
	  *walk_subtrees = 0;
	}
      break;

    default:
      if (IS_TYPE_OR_DECL_P (stmt))
	*walk_subtrees = 0;
      break;
    }

  p_set->add (*stmt_p);

  return NULL;
}

/* Lower C++ front end trees to GENERIC in T_P.  */

static void
cp_genericize_tree (tree* t_p, bool handle_invisiref_parm_p)
{
  struct cp_genericize_data wtd;

  wtd.p_set = new hash_set<tree>;
  wtd.bind_expr_stack.create (0);
  wtd.omp_ctx = NULL;
  wtd.try_block = NULL_TREE;
  wtd.no_sanitize_p = false;
  wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
  cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
  delete wtd.p_set;
  wtd.bind_expr_stack.release ();
  if (sanitize_flags_p (SANITIZE_VPTR))
    cp_ubsan_instrument_member_accesses (t_p);
}
/* If a non-void function doesn't obviously end with a return statement,
   add ubsan instrumentation code to verify at runtime that it does
   return.  If -fsanitize=return is not enabled, instrument
   __builtin_unreachable instead.  */

static void
cp_maybe_instrument_return (tree fndecl)
{
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
      || DECL_CONSTRUCTOR_P (fndecl)
      || DECL_DESTRUCTOR_P (fndecl)
      || !targetm.warn_func_return (fndecl))
    return;

  if (!sanitize_flags_p (SANITIZE_RETURN, fndecl)
      /* Don't add __builtin_unreachable () if not optimizing; it will not
	 improve any optimizations in that case, and will just break code
	 that has undefined behavior.  Don't add it for
	 -fsanitize=unreachable -fno-sanitize=return either: UBSan covers
	 this via ubsan_instrument_return, where sufficient location
	 information is provided, while the __builtin_unreachable () below,
	 if return sanitization is disabled, would just result in a
	 hard-to-understand runtime error without a location.  */
      && (!optimize
	  || sanitize_flags_p (SANITIZE_UNREACHABLE, fndecl)))
    return;

  tree t = DECL_SAVED_TREE (fndecl);
  while (t)
    {
      switch (TREE_CODE (t))
	{
	case BIND_EXPR:
	  t = BIND_EXPR_BODY (t);
	  continue;
	case TRY_FINALLY_EXPR:
	case CLEANUP_POINT_EXPR:
	  t = TREE_OPERAND (t, 0);
	  continue;
	case STATEMENT_LIST:
	  {
	    tree_stmt_iterator i = tsi_last (t);
	    while (!tsi_end_p (i))
	      {
		tree p = tsi_stmt (i);
		if (TREE_CODE (p) != DEBUG_BEGIN_STMT)
		  break;
		tsi_prev (&i);
	      }
	    if (!tsi_end_p (i))
	      {
		t = tsi_stmt (i);
		continue;
	      }
	  }
	  break;
	case RETURN_EXPR:
	  return;
	default:
	  break;
	}
      break;
    }
  if (t == NULL_TREE)
    return;
  tree *p = &DECL_SAVED_TREE (fndecl);
  if (TREE_CODE (*p) == BIND_EXPR)
    p = &BIND_EXPR_BODY (*p);

  location_t loc = DECL_SOURCE_LOCATION (fndecl);
  if (sanitize_flags_p (SANITIZE_RETURN, fndecl))
    t = ubsan_instrument_return (loc);
  else
    {
      tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
      t = build_call_expr_loc (BUILTINS_LOCATION, fndecl, 0);
    }

  append_to_statement_list (t, p);
}
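/* For illustration, a user-level sketch of what cp_maybe_instrument_return
   acts on (hypothetical function, not part of GCC):

     int sign (int x)
     {
       if (x > 0) return 1;
       if (x < 0) return -1;
     }					// may fall off the end

   With -fsanitize=return a missing-return check is appended, so falling
   off the end is reported with a source location; otherwise, when
   optimizing, __builtin_unreachable () is appended instead.  */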
void
cp_genericize (tree fndecl)
{
  tree t;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
	/* If a function's arguments are copied to create a thunk,
	   then DECL_BY_REFERENCE will be set -- but the type of the
	   argument will be a pointer type, so we will never get
	   here.  */
	gcc_assert (!DECL_BY_REFERENCE (t));
	gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
	TREE_TYPE (t) = DECL_ARG_TYPE (t);
	DECL_BY_REFERENCE (t) = 1;
	TREE_ADDRESSABLE (t) = 0;
	relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
      if (DECL_NAME (t))
	{
	  /* Adjust DECL_VALUE_EXPR of the original var.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (VAR_P (var)
		  && DECL_NAME (t) == DECL_NAME (var)
		  && DECL_HAS_VALUE_EXPR_P (var)
		  && DECL_VALUE_EXPR (var) == t)
		{
		  tree val = convert_from_reference (t);
		  SET_DECL_VALUE_EXPR (var, val);
		  break;
		}
	}
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* Allow cp_genericize calls to be nested.  */
  tree save_bc_label[2];
  save_bc_label[bc_break] = bc_label[bc_break];
  save_bc_label[bc_continue] = bc_label[bc_continue];
  bc_label[bc_break] = NULL_TREE;
  bc_label[bc_continue] = NULL_TREE;

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);

  cp_maybe_instrument_return (fndecl);

  /* Do everything else.  */
  c_genericize (fndecl);

  gcc_assert (bc_label[bc_break] == NULL);
  gcc_assert (bc_label[bc_continue] == NULL);
  bc_label[bc_break] = save_bc_label[bc_break];
  bc_label[bc_continue] = save_bc_label[bc_continue];
}

/* Build code to apply FN to each member of ARG1 and ARG2.  FN may be
   NULL if there is in fact nothing to do.  ARG2 may be null if FN
   actually only takes one argument.  */

static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = XALLOCAVEC (tree, nargs);

  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  bool is_method = TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE;
  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      start1 = arg1;
      start2 = arg2;
      do
	{
	  inner_type = TREE_TYPE (inner_type);
	  start1 = build4 (ARRAY_REF, inner_type, start1,
			   size_zero_node, NULL, NULL);
	  if (arg2)
	    start2 = build4 (ARRAY_REF, inner_type, start2,
			     size_zero_node, NULL, NULL);
	}
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
	start2 = build_fold_addr_expr_loc (input_location, start2);

      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = fold_build_pointer_plus (start1, end1);

      p1 = create_tmp_var (TREE_TYPE (start1));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  p2 = create_tmp_var (TREE_TYPE (start2));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
	  append_to_statement_list (t, &ret);
	}

      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
	argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm), fn,
					   i - is_method,
					   tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
	  append_to_statement_list (t, &ret);
	}

      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
	argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm), fn,
					   i - is_method,
					   tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}
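/* To make the array case above concrete: for array operands the statement
   list built is roughly equivalent to this C loop (a sketch, not literal
   output):

     element_type *p1 = &arg1[0]...[0];
     element_type *end1 = p1 + number_of_elements;
     do
       {
	 fn (p1 [, p2] [, default args]);
	 p1 += 1;  [p2 += 1;]
       }
     while (p1 != end1);

   i.e. FN is applied to each element of the (possibly multidimensional)
   ARG1, and of ARG2 in lockstep when it is present.  */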
/* Return code to initialize DECL with its default constructor, or
   NULL if there's nothing to do.  */

tree
cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);

  return ret;
}

/* Return code to initialize DST with a copy constructor from SRC.  */

tree
cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Similarly, except use an assignment operator instead.  */

tree
cxx_omp_clause_assign_op (tree clause, tree dst, tree src)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src);
  if (ret == NULL)
    ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);

  return ret;
}

/* Return code to destroy DECL.  */

tree
cxx_omp_clause_dtor (tree clause, tree decl)
{
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL);

  return ret;
}

/* True if OpenMP should privatize what this DECL points to rather
   than the DECL itself.  */

bool
cxx_omp_privatize_by_reference (const_tree decl)
{
  return (TYPE_REF_P (TREE_TYPE (decl))
	  || is_invisiref_parm (decl));
}

/* Return true if DECL is a const-qualified variable with no mutable
   members.  */

bool
cxx_omp_const_qual_no_mutable (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (TYPE_REF_P (type))
    {
      if (!is_invisiref_parm (decl))
	return false;
      type = TREE_TYPE (type);

      if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
	{
	  /* NVR doesn't preserve const qualification of the
	     variable's type.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (VAR_P (var)
		  && DECL_NAME (decl) == DECL_NAME (var)
		  && (TYPE_MAIN_VARIANT (type)
		      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
		{
		  if (TYPE_READONLY (TREE_TYPE (var)))
		    type = TREE_TYPE (var);
		  break;
		}
	}
    }

  if (type == error_mark_node)
    return false;

  /* Variables with const-qualified type having no mutable member
     are predetermined shared.  */
  if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
    return true;

  return false;
}
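/* For illustration (user-level OpenMP C++, hypothetical types): given

     const int limit = 42;		// const, no mutable members
     struct C { mutable int cache; };
     const C c;				// const, but has a mutable member

   LIMIT satisfies cxx_omp_const_qual_no_mutable and so can be treated as
   predetermined shared inside a parallel region, while C does not,
   because writes through its mutable member would still be possible.  */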
/* True if the OpenMP sharing attribute of DECL is predetermined.  */

enum omp_clause_default_kind
cxx_omp_predetermined_sharing_1 (tree decl)
{
  /* Static data members are predetermined shared.  */
  if (TREE_STATIC (decl))
    {
      tree ctx = CP_DECL_CONTEXT (decl);
      if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
	return OMP_CLAUSE_DEFAULT_SHARED;

      if (c_omp_predefined_variable (decl))
	return OMP_CLAUSE_DEFAULT_SHARED;
    }

  /* 'this' may not be specified in data-sharing clauses; still, we need
     to predetermine it firstprivate.  */
  if (decl == current_class_ptr)
    return OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;

  return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
}

/* Likewise, but also include the artificial vars.  We don't want to
   disallow the artificial vars being mentioned in explicit clauses,
   as we use artificial vars e.g. for loop constructs with random
   access iterators other than pointers, but during gimplification
   we want to treat them as predetermined.  */

enum omp_clause_default_kind
cxx_omp_predetermined_sharing (tree decl)
{
  enum omp_clause_default_kind ret = cxx_omp_predetermined_sharing_1 (decl);
  if (ret != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return ret;

  /* Predetermine artificial variables holding integral values; those
     are usually the result of gimplify_one_sizepos or SAVE_EXPR
     gimplification.  */
  if (VAR_P (decl)
      && DECL_ARTIFICIAL (decl)
      && INTEGRAL_TYPE_P (TREE_TYPE (decl))
      && !(DECL_LANG_SPECIFIC (decl)
	   && DECL_OMP_PRIVATIZED_MEMBER (decl)))
    return OMP_CLAUSE_DEFAULT_SHARED;

  /* Similarly for typeinfo symbols.  */
  if (VAR_P (decl) && DECL_ARTIFICIAL (decl) && DECL_TINFO_P (decl))
    return OMP_CLAUSE_DEFAULT_SHARED;

  return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
}

/* Finalize an implicitly determined clause.  */

void
cxx_omp_finish_clause (tree c, gimple_seq *)
{
  tree decl, inner_type;
  bool make_shared = false;

  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE
      && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LASTPRIVATE
	  || !OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)))
    return;

  decl = OMP_CLAUSE_DECL (c);
  decl = require_complete_type (decl);
  inner_type = TREE_TYPE (decl);
  if (decl == error_mark_node)
    make_shared = true;
  else if (TYPE_REF_P (TREE_TYPE (decl)))
    inner_type = TREE_TYPE (inner_type);

  /* We're interested in the base element, not arrays.  */
  while (TREE_CODE (inner_type) == ARRAY_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* Check for special function availability by building a call to one.
     Save the results, because later we won't be in the right context
     for making these queries.  */
  bool first = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE;
  if (!make_shared
      && CLASS_TYPE_P (inner_type)
      && cxx_omp_create_clause_info (c, inner_type, !first, first, !first,
				     true))
    make_shared = true;

  if (make_shared)
    {
      OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
      OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) = 0;
      OMP_CLAUSE_SHARED_READONLY (c) = 0;
    }
}
/* Return true if DECL's DECL_VALUE_EXPR (if any) should be
   disregarded in OpenMP construct, because it is going to be
   remapped during OpenMP lowering.  SHARED is true if DECL
   is going to be shared, false if it is going to be privatized.  */

bool
cxx_omp_disregard_value_expr (tree decl, bool shared)
{
  if (shared)
    return false;
  if (VAR_P (decl)
      && DECL_HAS_VALUE_EXPR_P (decl)
      && DECL_ARTIFICIAL (decl)
      && DECL_LANG_SPECIFIC (decl)
      && DECL_OMP_PRIVATIZED_MEMBER (decl))
    return true;
  if (VAR_P (decl) && DECL_CONTEXT (decl) && is_capture_proxy (decl))
    return true;
  return false;
}

/* Fold expression X which is used as an rvalue if RVAL is true.  */

tree
cp_fold_maybe_rvalue (tree x, bool rval)
{
  while (true)
    {
      x = cp_fold (x);
      if (rval)
	x = mark_rvalue_use (x);
      if (rval && DECL_P (x)
	  && !TYPE_REF_P (TREE_TYPE (x)))
	{
	  tree v = decl_constant_value (x);
	  if (v != x && v != error_mark_node)
	    {
	      x = v;
	      continue;
	    }
	}
      break;
    }
  return x;
}

/* Fold expression X which is used as an rvalue.  */

tree
cp_fold_rvalue (tree x)
{
  return cp_fold_maybe_rvalue (x, true);
}

/* Perform folding on expression X.  */

tree
cp_fully_fold (tree x)
{
  if (processing_template_decl)
    return x;
  /* FIXME cp_fold ought to be a superset of maybe_constant_value so we
     don't have to call both.  */
  if (cxx_dialect >= cxx11)
    {
      x = maybe_constant_value (x);
      /* Sometimes we are given a CONSTRUCTOR but the call above wraps it
	 into a TARGET_EXPR; undo that here.  */
      if (TREE_CODE (x) == TARGET_EXPR)
	x = TARGET_EXPR_INITIAL (x);
      else if (TREE_CODE (x) == VIEW_CONVERT_EXPR
	       && TREE_CODE (TREE_OPERAND (x, 0)) == CONSTRUCTOR
	       && TREE_TYPE (TREE_OPERAND (x, 0)) == TREE_TYPE (x))
	x = TREE_OPERAND (x, 0);
    }
  return cp_fold_rvalue (x);
}

/* Likewise, but also fold recursively, which cp_fully_fold doesn't perform
   in some cases.  */

tree
cp_fully_fold_init (tree x)
{
  if (processing_template_decl)
    return x;
  x = cp_fully_fold (x);
  hash_set<tree> pset;
  cp_walk_tree (&x, cp_fold_r, &pset, NULL);
  return x;
}

/* c-common interface to cp_fold.  If IN_INIT, this is in a static
   initializer and certain changes are made to the folding done.  Or should
   be (FIXME).  We never touch maybe_const, as it is only used for the C
   front-end C_MAYBE_CONST_EXPR.  */

tree
c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval)
{
  return cp_fold_maybe_rvalue (x, !lval);
}

static GTY((deletable)) hash_map<tree, tree> *fold_cache;

/* Dispose of the whole FOLD_CACHE.  */

void
clear_fold_cache (void)
{
  if (fold_cache != NULL)
    fold_cache->empty ();
}
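/* For illustration, the kind of simplification the cp_fold machinery below
   performs (hypothetical user code):

     constexpr int n = 4;
     int a = n * 2 + 1;		// initializer folded to the constant 9

   cp_fully_fold above additionally unwraps a TARGET_EXPR or
   VIEW_CONVERT_EXPR that maybe_constant_value may wrap around a
   CONSTRUCTOR result.  */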
/* This function tries to fold an expression X.
   To avoid combinatorial explosion, folding results are kept in fold_cache.
   If X is invalid, we don't fold at all.
   For performance reasons we don't cache expressions representing a
   declaration or constant.
   Returns X or its folded variant.  */

static tree
cp_fold (tree x)
{
  tree op0, op1, op2, op3;
  tree org_x = x, r = NULL_TREE;
  enum tree_code code;
  location_t loc;
  bool rval_ops = true;

  if (!x || x == error_mark_node)
    return x;

  if (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node))
    return x;

  /* Don't bother to cache DECLs or constants.  */
  if (DECL_P (x) || CONSTANT_CLASS_P (x))
    return x;

  if (fold_cache == NULL)
    fold_cache = hash_map<tree, tree>::create_ggc (101);

  if (tree *cached = fold_cache->get (x))
    return *cached;

  code = TREE_CODE (x);
  switch (code)
    {
    case CLEANUP_POINT_EXPR:
      /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side
	 effects.  */
      r = cp_fold_rvalue (TREE_OPERAND (x, 0));
      if (!TREE_SIDE_EFFECTS (r))
	x = r;
      break;

    case SIZEOF_EXPR:
      x = fold_sizeof_expr (x);
      break;

    case VIEW_CONVERT_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case CONVERT_EXPR:
    case NOP_EXPR:
    case NON_LVALUE_EXPR:

      if (VOID_TYPE_P (TREE_TYPE (x)))
	{
	  /* This is just to make sure we don't end up with casts to
	     void from error_mark_node.  If we just return x, then
	     cp_fold_r might fold the operand into error_mark_node and
	     leave the conversion in the IR.  STRIP_USELESS_TYPE_CONVERSION
	     during gimplification doesn't like such casts.
	     Don't create a new tree if op0 != TREE_OPERAND (x, 0); the
	     folding of the operand should be in the caches, and if folded
	     in cp_fold_r it will be modified in place.  */
	  op0 = cp_fold (TREE_OPERAND (x, 0));
	  if (op0 == error_mark_node)
	    x = error_mark_node;
	  break;
	}

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);

      if (code == CONVERT_EXPR
	  && SCALAR_TYPE_P (TREE_TYPE (x))
	  && op0 != void_node)
	/* During parsing we used convert_to_*_nofold; re-convert now using
	   the folding variants, since fold() doesn't do those
	   transformations.  */
	x = fold (convert (TREE_TYPE (x), op0));
      else if (op0 != TREE_OPERAND (x, 0))
	{
	  if (op0 == error_mark_node)
	    x = error_mark_node;
	  else
	    x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
	}
      else
	x = fold (x);

      /* Conversion of an out-of-range value has implementation-defined
	 behavior; the language considers it different from arithmetic
	 overflow, which is undefined.  */
      if (TREE_CODE (op0) == INTEGER_CST
	  && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0))
	TREE_OVERFLOW (x) = false;

      break;

    case INDIRECT_REF:
      /* We don't need the decltype(auto) obfuscation anymore.  */
      if (REF_PARENTHESIZED_P (x))
	{
	  tree p = maybe_undo_parenthesized_ref (x);
	  return cp_fold (p);
	}
      goto unary;
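      /* For illustration, the classic pre-offsetof idiom handled by the
	 ADDR_EXPR case below (user-level C++, hypothetical struct):

	   struct S { int a; int b; };
	   size_t off = (size_t) &((S *) 0)->b;	  // typically folds to 4

	 Taking the address of a member of a constant "pointer" amounts to
	 offsetof, and fold_offsetof turns it into an INTEGER_CST.  */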
    case ADDR_EXPR:
      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), false);

      /* Cope with user tricks that amount to offsetof.  */
      if (op0 != error_mark_node
	  && !FUNC_OR_METHOD_TYPE_P (TREE_TYPE (op0)))
	{
	  tree val = get_base_address (op0);
	  if (val
	      && INDIRECT_REF_P (val)
	      && COMPLETE_TYPE_P (TREE_TYPE (val))
	      && TREE_CONSTANT (TREE_OPERAND (val, 0)))
	    {
	      val = TREE_OPERAND (val, 0);
	      STRIP_NOPS (val);
	      val = maybe_constant_value (val);
	      if (TREE_CODE (val) == INTEGER_CST)
		return fold_offsetof (op0, TREE_TYPE (x));
	    }
	}
      goto finish_unary;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case CONJ_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case ABSU_EXPR:
    case BIT_NOT_EXPR:
    case TRUTH_NOT_EXPR:
    case FIXED_CONVERT_EXPR:
    unary:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);

    finish_unary:
      if (op0 != TREE_OPERAND (x, 0))
	{
	  if (op0 == error_mark_node)
	    x = error_mark_node;
	  else
	    {
	      x = fold_build1_loc (loc, code, TREE_TYPE (x), op0);
	      if (code == INDIRECT_REF
		  && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF))
		{
		  TREE_READONLY (x) = TREE_READONLY (org_x);
		  TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
		  TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
		}
	    }
	}
      else
	x = fold (x);

      gcc_assert (TREE_CODE (x) != COND_EXPR
		  || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0))));
      break;

    case UNARY_PLUS_EXPR:
      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
      if (op0 == error_mark_node)
	x = error_mark_node;
      else
	x = fold_convert (TREE_TYPE (x), op0);
      break;

    case POSTDECREMENT_EXPR:
    case POSTINCREMENT_EXPR:
    case INIT_EXPR:
    case PREDECREMENT_EXPR:
    case PREINCREMENT_EXPR:
    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      rval_ops = false;
      /* FALLTHRU */
    case POINTER_PLUS_EXPR:
    case PLUS_EXPR:
    case POINTER_DIFF_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_XOR_EXPR:
    case LT_EXPR: case LE_EXPR:
    case GT_EXPR: case GE_EXPR:
    case EQ_EXPR: case NE_EXPR:
    case UNORDERED_EXPR: case ORDERED_EXPR:
    case UNLT_EXPR: case UNLE_EXPR:
    case UNGT_EXPR: case UNGE_EXPR:
    case UNEQ_EXPR: case LTGT_EXPR:
    case RANGE_EXPR: case COMPLEX_EXPR:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops);
      op1 = cp_fold_rvalue (TREE_OPERAND (x, 1));
      /* decltype(nullptr) has only one value, so optimize away all
	 comparisons with that type right away; keeping them in the IL
	 causes trouble for various optimizations.  */
      if (COMPARISON_CLASS_P (org_x)
	  && TREE_CODE (TREE_TYPE (op0)) == NULLPTR_TYPE
	  && TREE_CODE (TREE_TYPE (op1)) == NULLPTR_TYPE)
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	    case LE_EXPR:
	    case GE_EXPR:
	      x = constant_boolean_node (true, TREE_TYPE (x));
	      break;
	    case NE_EXPR:
	    case LT_EXPR:
	    case GT_EXPR:
	      x = constant_boolean_node (false, TREE_TYPE (x));
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  return omit_two_operands_loc (loc, TREE_TYPE (x), x,
					op0, op1);
	}

      if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1))
	{
	  if (op0 == error_mark_node || op1 == error_mark_node)
	    x = error_mark_node;
	  else
	    x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1);
	}
      else
	x = fold (x);

      /* This is only needed for -Wnonnull-compare and only if
	 TREE_NO_WARNING (org_x), but to avoid that option affecting code
	 generation, we do it always.  */
      if (COMPARISON_CLASS_P (org_x))
	{
	  if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST)
	    ;
	  else if (COMPARISON_CLASS_P (x))
	    {
	      if (TREE_NO_WARNING (org_x) && warn_nonnull_compare)
		TREE_NO_WARNING (x) = 1;
	    }
	  /* Otherwise give up on optimizing these, let GIMPLE folders
	     optimize those later on.  */
	  else if (op0 != TREE_OPERAND (org_x, 0)
		   || op1 != TREE_OPERAND (org_x, 1))
	    {
	      x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1);
	      if (TREE_NO_WARNING (org_x) && warn_nonnull_compare)
		TREE_NO_WARNING (x) = 1;
	    }
	  else
	    x = org_x;
	}

      break;

    case VEC_COND_EXPR:
    case COND_EXPR:
      loc = EXPR_LOCATION (x);
      op0 = cp_fold_rvalue (TREE_OPERAND (x, 0));
      op1 = cp_fold (TREE_OPERAND (x, 1));
      op2 = cp_fold (TREE_OPERAND (x, 2));

      if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE)
	{
	  warning_sentinel s (warn_int_in_bool_context);
	  if (!VOID_TYPE_P (TREE_TYPE (op1)))
	    op1 = cp_truthvalue_conversion (op1, tf_warning_or_error);
	  if (!VOID_TYPE_P (TREE_TYPE (op2)))
	    op2 = cp_truthvalue_conversion (op2, tf_warning_or_error);
	}
      else if (VOID_TYPE_P (TREE_TYPE (x)))
	{
	  if (TREE_CODE (op0) == INTEGER_CST)
	    {
	      /* If the condition is constant, fold can fold away
		 the COND_EXPR.  If some statement-level uses of COND_EXPR
		 have one of the branches NULL, avoid folding crash.  */
	      if (!op1)
		op1 = build_empty_stmt (loc);
	      if (!op2)
		op2 = build_empty_stmt (loc);
	    }
	  else
	    {
	      /* Otherwise, don't bother folding a void condition, since
		 it can't produce a constant value.  */
	      if (op0 != TREE_OPERAND (x, 0)
		  || op1 != TREE_OPERAND (x, 1)
		  || op2 != TREE_OPERAND (x, 2))
		x = build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
	      break;
	    }
	}

      if (op0 != TREE_OPERAND (x, 0)
	  || op1 != TREE_OPERAND (x, 1)
	  || op2 != TREE_OPERAND (x, 2))
	{
	  if (op0 == error_mark_node
	      || op1 == error_mark_node
	      || op2 == error_mark_node)
	    x = error_mark_node;
	  else
	    x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2);
	}
      else
	x = fold (x);
      /* A COND_EXPR might have incompatible types in branches if one or
	 both arms are bitfields.  If folding exposed such a branch, fix
	 it up.  */
      if (TREE_CODE (x) != code
	  && x != error_mark_node
	  && !useless_type_conversion_p (TREE_TYPE (org_x), TREE_TYPE (x)))
	x = fold_convert (TREE_TYPE (org_x), x);

      break;

    case CALL_EXPR:
      {
	int i, m, sv = optimize, nw = sv, changed = 0;
	tree callee = get_callee_fndecl (x);

	/* Some built-in function calls will be evaluated at compile-time
	   in fold ().  Set optimize to 1 when folding
	   __builtin_constant_p inside a constexpr function so that
	   fold_builtin_1 doesn't fold it to 0.  */
	if (callee && fndecl_built_in_p (callee) && !optimize
	    && DECL_IS_BUILTIN_CONSTANT_P (callee)
	    && current_function_decl
	    && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
	  nw = 1;

	/* Defer folding __builtin_is_constant_evaluated.  */
	if (callee
	    && fndecl_built_in_p (callee, CP_BUILT_IN_IS_CONSTANT_EVALUATED,
				  BUILT_IN_FRONTEND))
	  break;

	if (callee
	    && fndecl_built_in_p (callee, CP_BUILT_IN_SOURCE_LOCATION,
				  BUILT_IN_FRONTEND))
	  {
	    x = fold_builtin_source_location (EXPR_LOCATION (x));
	    break;
	  }

	x = copy_node (x);

	m = call_expr_nargs (x);
	for (i = 0; i < m; i++)
	  {
	    r = cp_fold (CALL_EXPR_ARG (x, i));
	    if (r != CALL_EXPR_ARG (x, i))
	      {
		if (r == error_mark_node)
		  {
		    x = error_mark_node;
		    break;
		  }
		changed = 1;
	      }
	    CALL_EXPR_ARG (x, i) = r;
	  }
	if (x == error_mark_node)
	  break;

	optimize = nw;
	r = fold (x);
	optimize = sv;

	if (TREE_CODE (r) != CALL_EXPR)
	  {
	    x = cp_fold (r);
	    break;
	  }

	optimize = nw;

	/* Invoke maybe_constant_value for functions declared
	   constexpr and not called with AGGR_INIT_EXPRs.
	   TODO:
	   Do constexpr expansion of expressions where the call itself is
	   not constant, but the call followed by an INDIRECT_REF is.  */
	if (callee && DECL_DECLARED_CONSTEXPR_P (callee)
	    && !flag_no_inline)
	  r = maybe_constant_value (x);
	optimize = sv;

	if (TREE_CODE (r) != CALL_EXPR)
	  {
	    if (DECL_CONSTRUCTOR_P (callee))
	      {
		loc = EXPR_LOCATION (x);
		tree s = build_fold_indirect_ref_loc (loc,
						      CALL_EXPR_ARG (x, 0));
		r = build2_loc (loc, INIT_EXPR, TREE_TYPE (s), s, r);
	      }
	    x = r;
	    break;
	  }

	if (!changed)
	  x = org_x;
	break;
      }

    case CONSTRUCTOR:
      {
	unsigned i;
	constructor_elt *p;
	vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x);
	vec<constructor_elt, va_gc> *nelts = NULL;
	FOR_EACH_VEC_SAFE_ELT (elts, i, p)
	  {
	    tree op = cp_fold (p->value);
	    if (op != p->value)
	      {
		if (op == error_mark_node)
		  {
		    x = error_mark_node;
		    vec_free (nelts);
		    break;
		  }
		if (nelts == NULL)
		  nelts = elts->copy ();
		(*nelts)[i].value = op;
	      }
	  }
	if (nelts)
	  {
	    x = build_constructor (TREE_TYPE (x), nelts);
	    CONSTRUCTOR_PLACEHOLDER_BOUNDARY (x)
	      = CONSTRUCTOR_PLACEHOLDER_BOUNDARY (org_x);
	  }
	if (VECTOR_TYPE_P (TREE_TYPE (x)))
	  x = fold (x);
	break;
      }
    case TREE_VEC:
      {
	bool changed = false;
	releasing_vec vec;
	int i, n = TREE_VEC_LENGTH (x);
	vec_safe_reserve (vec, n);

	for (i = 0; i < n; i++)
	  {
	    tree op = cp_fold (TREE_VEC_ELT (x, i));
	    vec->quick_push (op);
	    if (op != TREE_VEC_ELT (x, i))
	      changed = true;
	  }

	if (changed)
	  {
	    r = copy_node (x);
	    for (i = 0; i < n; i++)
	      TREE_VEC_ELT (r, i) = (*vec)[i];
	    x = r;
	  }
      }

      break;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:

      loc = EXPR_LOCATION (x);
      op0 = cp_fold (TREE_OPERAND (x, 0));
      op1 = cp_fold (TREE_OPERAND (x, 1));
      op2 = cp_fold (TREE_OPERAND (x, 2));
      op3 = cp_fold (TREE_OPERAND (x, 3));

      if (op0 != TREE_OPERAND (x, 0)
	  || op1 != TREE_OPERAND (x, 1)
	  || op2 != TREE_OPERAND (x, 2)
	  || op3 != TREE_OPERAND (x, 3))
	{
	  if (op0 == error_mark_node
	      || op1 == error_mark_node
	      || op2 == error_mark_node
	      || op3 == error_mark_node)
	    x = error_mark_node;
	  else
	    {
	      x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3);
	      TREE_READONLY (x) = TREE_READONLY (org_x);
	      TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x);
	      TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
	    }
	}

      x = fold (x);
      break;

    case SAVE_EXPR:
      /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after
	 folding, evaluates to an invariant.  In that case no need to wrap
	 this folded tree with a SAVE_EXPR.  */
      r = cp_fold (TREE_OPERAND (x, 0));
      if (tree_invariant_p (r))
	x = r;
      break;

    default:
      return org_x;
    }

  if (EXPR_P (x) && TREE_CODE (x) == code)
    {
      TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x);
      TREE_NO_WARNING (x) = TREE_NO_WARNING (org_x);
    }

  fold_cache->put (org_x, x);
  /* Make sure we don't try to fold an already folded result again.  */
  if (x != org_x)
    fold_cache->put (x, x);

  return x;
}
/* Look up "hot", "cold", "likely" or "unlikely" in attribute list
   LIST.  */

tree
lookup_hotness_attribute (tree list)
{
  for (; list; list = TREE_CHAIN (list))
    {
      tree name = get_attribute_name (list);
      if (is_attribute_p ("hot", name)
	  || is_attribute_p ("cold", name)
	  || is_attribute_p ("likely", name)
	  || is_attribute_p ("unlikely", name))
	break;
    }
  return list;
}

/* Remove the "hot", "cold", "likely" and "unlikely" attributes from
   LIST.  */

static tree
remove_hotness_attribute (tree list)
{
  list = remove_attribute ("hot", list);
  list = remove_attribute ("cold", list);
  list = remove_attribute ("likely", list);
  list = remove_attribute ("unlikely", list);
  return list;
}

/* If [[likely]] or [[unlikely]] appear on this statement, turn it into a
   PREDICT_EXPR.  */

tree
process_stmt_hotness_attribute (tree std_attrs, location_t attrs_loc)
{
  if (std_attrs == error_mark_node)
    return std_attrs;
  if (tree attr = lookup_hotness_attribute (std_attrs))
    {
      tree name = get_attribute_name (attr);
      bool hot = (is_attribute_p ("hot", name)
		  || is_attribute_p ("likely", name));
      tree pred = build_predict_expr (hot ? PRED_HOT_LABEL : PRED_COLD_LABEL,
				      hot ? TAKEN : NOT_TAKEN);
      SET_EXPR_LOCATION (pred, attrs_loc);
      add_stmt (pred);
      if (tree other = lookup_hotness_attribute (TREE_CHAIN (attr)))
	warning (OPT_Wattributes, "ignoring attribute %qE after earlier %qE",
		 get_attribute_name (other), name);
      std_attrs = remove_hotness_attribute (std_attrs);
    }
  return std_attrs;
}
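/* For illustration (user-level C++20, hypothetical functions):

     if (x) [[likely]]
       f ();
     else
       g ();

   The [[likely]] attribute is lowered by the code above into a
   PREDICT_EXPR with PRED_HOT_LABEL ([[unlikely]] yields PRED_COLD_LABEL),
   which later guides branch probability estimation.  */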
/* Helper of fold_builtin_source_location: return the
   std::source_location::__impl type after performing verification
   on it.  LOC is used for reporting any errors.  */

static tree
get_source_location_impl_type (location_t loc)
{
  tree name = get_identifier ("source_location");
  tree decl = lookup_qualified_name (std_node, name);
  if (TREE_CODE (decl) != TYPE_DECL)
    {
      auto_diagnostic_group d;
      if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
	qualified_name_lookup_error (std_node, name, decl, loc);
      else
	error_at (loc, "%qD is not a type", decl);
      return error_mark_node;
    }
  name = get_identifier ("__impl");
  tree type = TREE_TYPE (decl);
  decl = lookup_qualified_name (type, name);
  if (TREE_CODE (decl) != TYPE_DECL)
    {
      auto_diagnostic_group d;
      if (decl == error_mark_node || TREE_CODE (decl) == TREE_LIST)
	qualified_name_lookup_error (type, name, decl, loc);
      else
	error_at (loc, "%qD is not a type", decl);
      return error_mark_node;
    }
  type = TREE_TYPE (decl);
  if (TREE_CODE (type) != RECORD_TYPE)
    {
      error_at (loc, "%qD is not a class type", decl);
      return error_mark_node;
    }

  int cnt = 0;
  for (tree field = TYPE_FIELDS (type);
       (field = next_initializable_field (field)) != NULL_TREE;
       field = DECL_CHAIN (field))
    {
      if (DECL_NAME (field) != NULL_TREE)
	{
	  const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
	  if (strcmp (n, "_M_file_name") == 0
	      || strcmp (n, "_M_function_name") == 0)
	    {
	      if (TREE_TYPE (field) != const_string_type_node)
		{
		  error_at (loc, "%qD does not have %<const char *%> type",
			    field);
		  return error_mark_node;
		}
	      cnt++;
	      continue;
	    }
	  else if (strcmp (n, "_M_line") == 0 || strcmp (n, "_M_column") == 0)
	    {
	      if (TREE_CODE (TREE_TYPE (field)) != INTEGER_TYPE)
		{
		  error_at (loc, "%qD does not have integral type", field);
		  return error_mark_node;
		}
	      cnt++;
	      continue;
	    }
	}
      cnt = 0;
      break;
    }
  if (cnt != 4)
    {
      error_at (loc, "%<std::source_location::__impl%> does not contain only "
		     "non-static data members %<_M_file_name%>, "
		     "%<_M_function_name%>, %<_M_line%> and %<_M_column%>");
      return error_mark_node;
    }
  return build_qualified_type (type, TYPE_QUAL_CONST);
}
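/* For reference, the layout the verification above expects matches the
   libstdc++ definition (simplified sketch, not compiled here):

     namespace std {
       struct source_location {
	 struct __impl {
	   const char *_M_file_name;
	   const char *_M_function_name;
	   unsigned _M_line;
	   unsigned _M_column;
	 };
       };
     }

   Exactly these four non-static data members, with these types, are
   required; anything else is rejected with the errors above.  */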
/* Type of entries in the source_location_table hash table below.  */
struct GTY((for_user)) source_location_table_entry {
  location_t loc;
  unsigned uid;
  tree var;
};

/* Traits class for the source_location_table hash table below.  */

struct source_location_table_entry_hash
  : ggc_remove <source_location_table_entry>
{
  typedef source_location_table_entry value_type;
  typedef source_location_table_entry compare_type;

  static hashval_t
  hash (const source_location_table_entry &ref)
  {
    inchash::hash hstate (0);
    hstate.add_int (ref.loc);
    hstate.add_int (ref.uid);
    return hstate.end ();
  }

  static bool
  equal (const source_location_table_entry &ref1,
	 const source_location_table_entry &ref2)
  {
    return ref1.loc == ref2.loc && ref1.uid == ref2.uid;
  }

  static void
  mark_deleted (source_location_table_entry &ref)
  {
    ref.loc = UNKNOWN_LOCATION;
    ref.uid = -1U;
    ref.var = NULL_TREE;
  }

  static const bool empty_zero_p = true;

  static void
  mark_empty (source_location_table_entry &ref)
  {
    ref.loc = UNKNOWN_LOCATION;
    ref.uid = 0;
    ref.var = NULL_TREE;
  }

  static bool
  is_deleted (const source_location_table_entry &ref)
  {
    return (ref.loc == UNKNOWN_LOCATION
	    && ref.uid == -1U
	    && ref.var == NULL_TREE);
  }

  static bool
  is_empty (const source_location_table_entry &ref)
  {
    return (ref.loc == UNKNOWN_LOCATION
	    && ref.uid == 0
	    && ref.var == NULL_TREE);
  }
};

static GTY(()) hash_table <source_location_table_entry_hash>
  *source_location_table;
static GTY(()) unsigned int source_location_id;
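/* For illustration of how the table above is reached (a hedged sketch of
   the libstdc++ side, not part of this file):
   std::source_location::current () is declared roughly as

     static consteval source_location
     current (const void *__p = __builtin_source_location ());

   so each call site of current () evaluates __builtin_source_location ()
   with that call site's location, and fold_builtin_source_location below
   returns the address of a per-location static __impl object, cached in
   source_location_table keyed on location and function.  */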
/* Fold __builtin_source_location () call.  LOC is the location
   of the call.  */

tree
fold_builtin_source_location (location_t loc)
{
  if (source_location_impl == NULL_TREE)
    {
      auto_diagnostic_group d;
      source_location_impl = get_source_location_impl_type (loc);
      if (source_location_impl == error_mark_node)
	inform (loc, "evaluating %qs", "__builtin_source_location");
    }
  if (source_location_impl == error_mark_node)
    return build_zero_cst (const_ptr_type_node);
  if (source_location_table == NULL)
    source_location_table
      = hash_table <source_location_table_entry_hash>::create_ggc (64);
  const line_map_ordinary *map;
  source_location_table_entry entry;
  entry.loc
    = linemap_resolve_location (line_table, loc, LRK_MACRO_EXPANSION_POINT,
				&map);
  entry.uid = current_function_decl ? DECL_UID (current_function_decl) : -1;
  entry.var = error_mark_node;
  source_location_table_entry *entryp
    = source_location_table->find_slot (entry, INSERT);
  tree var;
  if (entryp->var)
    var = entryp->var;
  else
    {
      char tmp_name[32];
      ASM_GENERATE_INTERNAL_LABEL (tmp_name, "Lsrc_loc",
				   source_location_id++);
      var = build_decl (loc, VAR_DECL, get_identifier (tmp_name),
			source_location_impl);
      TREE_STATIC (var) = 1;
      TREE_PUBLIC (var) = 0;
      DECL_ARTIFICIAL (var) = 1;
      DECL_IGNORED_P (var) = 1;
      DECL_EXTERNAL (var) = 0;
      DECL_DECLARED_CONSTEXPR_P (var) = 1;
      DECL_INITIALIZED_BY_CONSTANT_EXPRESSION_P (var) = 1;
      layout_decl (var, 0);

      vec<constructor_elt, va_gc> *v = NULL;
      vec_alloc (v, 4);
      for (tree field = TYPE_FIELDS (source_location_impl);
	   (field = next_initializable_field (field)) != NULL_TREE;
	   field = DECL_CHAIN (field))
	{
	  const char *n = IDENTIFIER_POINTER (DECL_NAME (field));
	  tree val = NULL_TREE;
	  if (strcmp (n, "_M_file_name") == 0)
	    {
	      if (const char *fname = LOCATION_FILE (loc))
		{
		  fname = remap_macro_filename (fname);
		  val = build_string_literal (strlen (fname) + 1, fname);
		}
	      else
		val = build_string_literal (1, "");
	    }
	  else if (strcmp (n, "_M_function_name") == 0)
	    {
	      const char *name = "";

	      if (current_function_decl)
		name = cxx_printable_name (current_function_decl, 0);

	      val = build_string_literal (strlen (name) + 1, name);
	    }
	  else if (strcmp (n, "_M_line") == 0)
	    val = build_int_cst (TREE_TYPE (field), LOCATION_LINE (loc));
	  else if (strcmp (n, "_M_column") == 0)
	    val = build_int_cst (TREE_TYPE (field), LOCATION_COLUMN (loc));
	  else
	    gcc_unreachable ();
	  CONSTRUCTOR_APPEND_ELT (v, field, val);
	}

      tree ctor = build_constructor (source_location_impl, v);
      TREE_CONSTANT (ctor) = 1;
      TREE_STATIC (ctor) = 1;
      DECL_INITIAL (var) = ctor;
      varpool_node::finalize_decl (var);
      *entryp = entry;
      entryp->var = var;
    }

  return build_fold_addr_expr_with_type_loc (loc, var, const_ptr_type_node);
}

#include "gt-cp-cp-gimplify.h"