1*38fd1498Szrj /* C++-specific tree lowering bits; see also c-gimplify.c and tree-gimple.c. 2*38fd1498Szrj 3*38fd1498Szrj Copyright (C) 2002-2018 Free Software Foundation, Inc. 4*38fd1498Szrj Contributed by Jason Merrill <jason@redhat.com> 5*38fd1498Szrj 6*38fd1498Szrj This file is part of GCC. 7*38fd1498Szrj 8*38fd1498Szrj GCC is free software; you can redistribute it and/or modify it under 9*38fd1498Szrj the terms of the GNU General Public License as published by the Free 10*38fd1498Szrj Software Foundation; either version 3, or (at your option) any later 11*38fd1498Szrj version. 12*38fd1498Szrj 13*38fd1498Szrj GCC is distributed in the hope that it will be useful, but WITHOUT ANY 14*38fd1498Szrj WARRANTY; without even the implied warranty of MERCHANTABILITY or 15*38fd1498Szrj FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 16*38fd1498Szrj for more details. 17*38fd1498Szrj 18*38fd1498Szrj You should have received a copy of the GNU General Public License 19*38fd1498Szrj along with GCC; see the file COPYING3. If not see 20*38fd1498Szrj <http://www.gnu.org/licenses/>. */ 21*38fd1498Szrj 22*38fd1498Szrj #include "config.h" 23*38fd1498Szrj #include "system.h" 24*38fd1498Szrj #include "coretypes.h" 25*38fd1498Szrj #include "target.h" 26*38fd1498Szrj #include "basic-block.h" 27*38fd1498Szrj #include "cp-tree.h" 28*38fd1498Szrj #include "gimple.h" 29*38fd1498Szrj #include "predict.h" 30*38fd1498Szrj #include "stor-layout.h" 31*38fd1498Szrj #include "tree-iterator.h" 32*38fd1498Szrj #include "gimplify.h" 33*38fd1498Szrj #include "c-family/c-ubsan.h" 34*38fd1498Szrj #include "stringpool.h" 35*38fd1498Szrj #include "attribs.h" 36*38fd1498Szrj #include "asan.h" 37*38fd1498Szrj 38*38fd1498Szrj /* Forward declarations. 
*/ 39*38fd1498Szrj 40*38fd1498Szrj static tree cp_genericize_r (tree *, int *, void *); 41*38fd1498Szrj static tree cp_fold_r (tree *, int *, void *); 42*38fd1498Szrj static void cp_genericize_tree (tree*, bool); 43*38fd1498Szrj static tree cp_fold (tree); 44*38fd1498Szrj 45*38fd1498Szrj /* Local declarations. */ 46*38fd1498Szrj 47*38fd1498Szrj enum bc_t { bc_break = 0, bc_continue = 1 }; 48*38fd1498Szrj 49*38fd1498Szrj /* Stack of labels which are targets for "break" or "continue", 50*38fd1498Szrj linked through TREE_CHAIN. */ 51*38fd1498Szrj static tree bc_label[2]; 52*38fd1498Szrj 53*38fd1498Szrj /* Begin a scope which can be exited by a break or continue statement. BC 54*38fd1498Szrj indicates which. 55*38fd1498Szrj 56*38fd1498Szrj Just creates a label with location LOCATION and pushes it into the current 57*38fd1498Szrj context. */ 58*38fd1498Szrj 59*38fd1498Szrj static tree 60*38fd1498Szrj begin_bc_block (enum bc_t bc, location_t location) 61*38fd1498Szrj { 62*38fd1498Szrj tree label = create_artificial_label (location); 63*38fd1498Szrj DECL_CHAIN (label) = bc_label[bc]; 64*38fd1498Szrj bc_label[bc] = label; 65*38fd1498Szrj if (bc == bc_break) 66*38fd1498Szrj LABEL_DECL_BREAK (label) = true; 67*38fd1498Szrj else 68*38fd1498Szrj LABEL_DECL_CONTINUE (label) = true; 69*38fd1498Szrj return label; 70*38fd1498Szrj } 71*38fd1498Szrj 72*38fd1498Szrj /* Finish a scope which can be exited by a break or continue statement. 73*38fd1498Szrj LABEL was returned from the most recent call to begin_bc_block. BLOCK is 74*38fd1498Szrj an expression for the contents of the scope. 75*38fd1498Szrj 76*38fd1498Szrj If we saw a break (or continue) in the scope, append a LABEL_EXPR to 77*38fd1498Szrj BLOCK. Otherwise, just forget the label. 
*/ 78*38fd1498Szrj 79*38fd1498Szrj static void 80*38fd1498Szrj finish_bc_block (tree *block, enum bc_t bc, tree label) 81*38fd1498Szrj { 82*38fd1498Szrj gcc_assert (label == bc_label[bc]); 83*38fd1498Szrj 84*38fd1498Szrj if (TREE_USED (label)) 85*38fd1498Szrj append_to_statement_list (build1 (LABEL_EXPR, void_type_node, label), 86*38fd1498Szrj block); 87*38fd1498Szrj 88*38fd1498Szrj bc_label[bc] = DECL_CHAIN (label); 89*38fd1498Szrj DECL_CHAIN (label) = NULL_TREE; 90*38fd1498Szrj } 91*38fd1498Szrj 92*38fd1498Szrj /* Get the LABEL_EXPR to represent a break or continue statement 93*38fd1498Szrj in the current block scope. BC indicates which. */ 94*38fd1498Szrj 95*38fd1498Szrj static tree 96*38fd1498Szrj get_bc_label (enum bc_t bc) 97*38fd1498Szrj { 98*38fd1498Szrj tree label = bc_label[bc]; 99*38fd1498Szrj 100*38fd1498Szrj /* Mark the label used for finish_bc_block. */ 101*38fd1498Szrj TREE_USED (label) = 1; 102*38fd1498Szrj return label; 103*38fd1498Szrj } 104*38fd1498Szrj 105*38fd1498Szrj /* Genericize a TRY_BLOCK. */ 106*38fd1498Szrj 107*38fd1498Szrj static void 108*38fd1498Szrj genericize_try_block (tree *stmt_p) 109*38fd1498Szrj { 110*38fd1498Szrj tree body = TRY_STMTS (*stmt_p); 111*38fd1498Szrj tree cleanup = TRY_HANDLERS (*stmt_p); 112*38fd1498Szrj 113*38fd1498Szrj *stmt_p = build2 (TRY_CATCH_EXPR, void_type_node, body, cleanup); 114*38fd1498Szrj } 115*38fd1498Szrj 116*38fd1498Szrj /* Genericize a HANDLER by converting to a CATCH_EXPR. */ 117*38fd1498Szrj 118*38fd1498Szrj static void 119*38fd1498Szrj genericize_catch_block (tree *stmt_p) 120*38fd1498Szrj { 121*38fd1498Szrj tree type = HANDLER_TYPE (*stmt_p); 122*38fd1498Szrj tree body = HANDLER_BODY (*stmt_p); 123*38fd1498Szrj 124*38fd1498Szrj /* FIXME should the caught type go in TREE_TYPE? 
*/ 125*38fd1498Szrj *stmt_p = build2 (CATCH_EXPR, void_type_node, type, body); 126*38fd1498Szrj } 127*38fd1498Szrj 128*38fd1498Szrj /* A terser interface for building a representation of an exception 129*38fd1498Szrj specification. */ 130*38fd1498Szrj 131*38fd1498Szrj static tree 132*38fd1498Szrj build_gimple_eh_filter_tree (tree body, tree allowed, tree failure) 133*38fd1498Szrj { 134*38fd1498Szrj tree t; 135*38fd1498Szrj 136*38fd1498Szrj /* FIXME should the allowed types go in TREE_TYPE? */ 137*38fd1498Szrj t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE); 138*38fd1498Szrj append_to_statement_list (failure, &EH_FILTER_FAILURE (t)); 139*38fd1498Szrj 140*38fd1498Szrj t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t); 141*38fd1498Szrj append_to_statement_list (body, &TREE_OPERAND (t, 0)); 142*38fd1498Szrj 143*38fd1498Szrj return t; 144*38fd1498Szrj } 145*38fd1498Szrj 146*38fd1498Szrj /* Genericize an EH_SPEC_BLOCK by converting it to a 147*38fd1498Szrj TRY_CATCH_EXPR/EH_FILTER_EXPR pair. */ 148*38fd1498Szrj 149*38fd1498Szrj static void 150*38fd1498Szrj genericize_eh_spec_block (tree *stmt_p) 151*38fd1498Szrj { 152*38fd1498Szrj tree body = EH_SPEC_STMTS (*stmt_p); 153*38fd1498Szrj tree allowed = EH_SPEC_RAISES (*stmt_p); 154*38fd1498Szrj tree failure = build_call_n (call_unexpected_fn, 1, build_exc_ptr ()); 155*38fd1498Szrj 156*38fd1498Szrj *stmt_p = build_gimple_eh_filter_tree (body, allowed, failure); 157*38fd1498Szrj TREE_NO_WARNING (*stmt_p) = true; 158*38fd1498Szrj TREE_NO_WARNING (TREE_OPERAND (*stmt_p, 1)) = true; 159*38fd1498Szrj } 160*38fd1498Szrj 161*38fd1498Szrj /* Genericize an IF_STMT by turning it into a COND_EXPR. 
   */

static void
genericize_if_stmt (tree *stmt_p)
{
  tree stmt, cond, then_, else_;
  location_t locus = EXPR_LOCATION (*stmt_p);

  stmt = *stmt_p;
  cond = IF_COND (stmt);
  then_ = THEN_CLAUSE (stmt);
  else_ = ELSE_CLAUSE (stmt);

  /* COND_EXPR wants both arms present; supply empty statements for
     missing clauses.  */
  if (!then_)
    then_ = build_empty_stmt (locus);
  if (!else_)
    else_ = build_empty_stmt (locus);

  /* If the condition is a known constant, keep only the live arm — but
     only when the dead arm has no side effects to preserve.  */
  if (integer_nonzerop (cond) && !TREE_SIDE_EFFECTS (else_))
    stmt = then_;
  else if (integer_zerop (cond) && !TREE_SIDE_EFFECTS (then_))
    stmt = else_;
  else
    stmt = build3 (COND_EXPR, void_type_node, cond, then_, else_);
  if (!EXPR_HAS_LOCATION (stmt))
    protected_set_expr_location (stmt, locus);
  *stmt_p = stmt;
}

/* Build a generic representation of one of the C loop forms.  COND is the
   loop condition or NULL_TREE.  BODY is the (possibly compound) statement
   controlled by the loop.  INCR is the increment expression of a for-loop,
   or NULL_TREE.  COND_IS_FIRST indicates whether the condition is
   evaluated before the loop body as in while and for loops, or after the
   loop body as in do-while loops.

   The result, a LOOP_EXPR (or simpler tree when the condition is a
   constant) wrapped with the break label, is stored back through STMT_P;
   *WALK_SUBTREES is cleared because the pieces are walked here.  */

static void
genericize_cp_loop (tree *stmt_p, location_t start_locus, tree cond, tree body,
		    tree incr, bool cond_is_first, int *walk_subtrees,
		    void *data)
{
  tree blab, clab;
  tree exit = NULL;
  tree stmt_list = NULL;

  /* Push break/continue scopes before walking the body so that any
     break/continue inside resolves to these labels.  */
  blab = begin_bc_block (bc_break, start_locus);
  clab = begin_bc_block (bc_continue, start_locus);

  protected_set_expr_location (incr, start_locus);

  /* Genericize the pieces ourselves, then tell the caller's walk not
     to descend into them again.  */
  cp_walk_tree (&cond, cp_genericize_r, data, NULL);
  cp_walk_tree (&body, cp_genericize_r, data, NULL);
  cp_walk_tree (&incr, cp_genericize_r, data, NULL);
  *walk_subtrees = 0;

  if (cond && TREE_CODE (cond) != INTEGER_CST)
    {
      /* If COND is constant, don't bother building an exit.  If it's false,
	 we won't build a loop.  If it's true, any exits are in the body.  */
      location_t cloc = EXPR_LOC_OR_LOC (cond, start_locus);
      exit = build1_loc (cloc, GOTO_EXPR, void_type_node,
			 get_bc_label (bc_break));
      exit = fold_build3_loc (cloc, COND_EXPR, void_type_node, cond,
			      build_empty_stmt (cloc), exit);
    }

  /* Lay out one iteration: [exit-test] body [continue-label] incr
     [exit-test], with the test first or last per COND_IS_FIRST.  */
  if (exit && cond_is_first)
    append_to_statement_list (exit, &stmt_list);
  append_to_statement_list (body, &stmt_list);
  finish_bc_block (&stmt_list, bc_continue, clab);
  append_to_statement_list (incr, &stmt_list);
  if (exit && !cond_is_first)
    append_to_statement_list (exit, &stmt_list);

  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  tree loop;
  if (cond && integer_zerop (cond))
    {
      /* A constant-false condition means no loop at all: for while/for
	 the body is dead (kept under a COND_EXPR for its side-effect
	 trees), for do-while it runs exactly once.  */
      if (cond_is_first)
	loop = fold_build3_loc (start_locus, COND_EXPR,
				void_type_node, cond, stmt_list,
				build_empty_stmt (start_locus));
      else
	loop = stmt_list;
    }
  else
    {
      /* For an infinite loop, prefer the location of the first body
	 statement so diagnostics point somewhere useful.  */
      location_t loc = start_locus;
      if (!cond || integer_nonzerop (cond))
	loc = EXPR_LOCATION (expr_first (body));
      if (loc == UNKNOWN_LOCATION)
	loc = start_locus;
      loop = build1_loc (loc, LOOP_EXPR, void_type_node, stmt_list);
    }

  /* The break label goes after the loop.  */
  stmt_list = NULL;
  append_to_statement_list (loop, &stmt_list);
  finish_bc_block (&stmt_list, bc_break, blab);
  if (!stmt_list)
    stmt_list = build_empty_stmt (start_locus);

  *stmt_p = stmt_list;
}
266*38fd1498Szrj 267*38fd1498Szrj /* Genericize a FOR_STMT node *STMT_P. */ 268*38fd1498Szrj 269*38fd1498Szrj static void 270*38fd1498Szrj genericize_for_stmt (tree *stmt_p, int *walk_subtrees, void *data) 271*38fd1498Szrj { 272*38fd1498Szrj tree stmt = *stmt_p; 273*38fd1498Szrj tree expr = NULL; 274*38fd1498Szrj tree loop; 275*38fd1498Szrj tree init = FOR_INIT_STMT (stmt); 276*38fd1498Szrj 277*38fd1498Szrj if (init) 278*38fd1498Szrj { 279*38fd1498Szrj cp_walk_tree (&init, cp_genericize_r, data, NULL); 280*38fd1498Szrj append_to_statement_list (init, &expr); 281*38fd1498Szrj } 282*38fd1498Szrj 283*38fd1498Szrj genericize_cp_loop (&loop, EXPR_LOCATION (stmt), FOR_COND (stmt), 284*38fd1498Szrj FOR_BODY (stmt), FOR_EXPR (stmt), 1, walk_subtrees, data); 285*38fd1498Szrj append_to_statement_list (loop, &expr); 286*38fd1498Szrj if (expr == NULL_TREE) 287*38fd1498Szrj expr = loop; 288*38fd1498Szrj *stmt_p = expr; 289*38fd1498Szrj } 290*38fd1498Szrj 291*38fd1498Szrj /* Genericize a WHILE_STMT node *STMT_P. */ 292*38fd1498Szrj 293*38fd1498Szrj static void 294*38fd1498Szrj genericize_while_stmt (tree *stmt_p, int *walk_subtrees, void *data) 295*38fd1498Szrj { 296*38fd1498Szrj tree stmt = *stmt_p; 297*38fd1498Szrj genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), WHILE_COND (stmt), 298*38fd1498Szrj WHILE_BODY (stmt), NULL_TREE, 1, walk_subtrees, data); 299*38fd1498Szrj } 300*38fd1498Szrj 301*38fd1498Szrj /* Genericize a DO_STMT node *STMT_P. */ 302*38fd1498Szrj 303*38fd1498Szrj static void 304*38fd1498Szrj genericize_do_stmt (tree *stmt_p, int *walk_subtrees, void *data) 305*38fd1498Szrj { 306*38fd1498Szrj tree stmt = *stmt_p; 307*38fd1498Szrj genericize_cp_loop (stmt_p, EXPR_LOCATION (stmt), DO_COND (stmt), 308*38fd1498Szrj DO_BODY (stmt), NULL_TREE, 0, walk_subtrees, data); 309*38fd1498Szrj } 310*38fd1498Szrj 311*38fd1498Szrj /* Genericize a SWITCH_STMT node *STMT_P by turning it into a SWITCH_EXPR. 
*/ 312*38fd1498Szrj 313*38fd1498Szrj static void 314*38fd1498Szrj genericize_switch_stmt (tree *stmt_p, int *walk_subtrees, void *data) 315*38fd1498Szrj { 316*38fd1498Szrj tree stmt = *stmt_p; 317*38fd1498Szrj tree break_block, body, cond, type; 318*38fd1498Szrj location_t stmt_locus = EXPR_LOCATION (stmt); 319*38fd1498Szrj 320*38fd1498Szrj break_block = begin_bc_block (bc_break, stmt_locus); 321*38fd1498Szrj 322*38fd1498Szrj body = SWITCH_STMT_BODY (stmt); 323*38fd1498Szrj if (!body) 324*38fd1498Szrj body = build_empty_stmt (stmt_locus); 325*38fd1498Szrj cond = SWITCH_STMT_COND (stmt); 326*38fd1498Szrj type = SWITCH_STMT_TYPE (stmt); 327*38fd1498Szrj 328*38fd1498Szrj cp_walk_tree (&body, cp_genericize_r, data, NULL); 329*38fd1498Szrj cp_walk_tree (&cond, cp_genericize_r, data, NULL); 330*38fd1498Szrj cp_walk_tree (&type, cp_genericize_r, data, NULL); 331*38fd1498Szrj *walk_subtrees = 0; 332*38fd1498Szrj 333*38fd1498Szrj if (TREE_USED (break_block)) 334*38fd1498Szrj SWITCH_BREAK_LABEL_P (break_block) = 1; 335*38fd1498Szrj finish_bc_block (&body, bc_break, break_block); 336*38fd1498Szrj *stmt_p = build2_loc (stmt_locus, SWITCH_EXPR, type, cond, body); 337*38fd1498Szrj SWITCH_ALL_CASES_P (*stmt_p) = SWITCH_STMT_ALL_CASES_P (stmt); 338*38fd1498Szrj gcc_checking_assert (!SWITCH_STMT_NO_BREAK_P (stmt) 339*38fd1498Szrj || !TREE_USED (break_block)); 340*38fd1498Szrj } 341*38fd1498Szrj 342*38fd1498Szrj /* Genericize a CONTINUE_STMT node *STMT_P. 
*/ 343*38fd1498Szrj 344*38fd1498Szrj static void 345*38fd1498Szrj genericize_continue_stmt (tree *stmt_p) 346*38fd1498Szrj { 347*38fd1498Szrj tree stmt_list = NULL; 348*38fd1498Szrj tree pred = build_predict_expr (PRED_CONTINUE, NOT_TAKEN); 349*38fd1498Szrj tree label = get_bc_label (bc_continue); 350*38fd1498Szrj location_t location = EXPR_LOCATION (*stmt_p); 351*38fd1498Szrj tree jump = build1_loc (location, GOTO_EXPR, void_type_node, label); 352*38fd1498Szrj append_to_statement_list_force (pred, &stmt_list); 353*38fd1498Szrj append_to_statement_list (jump, &stmt_list); 354*38fd1498Szrj *stmt_p = stmt_list; 355*38fd1498Szrj } 356*38fd1498Szrj 357*38fd1498Szrj /* Genericize a BREAK_STMT node *STMT_P. */ 358*38fd1498Szrj 359*38fd1498Szrj static void 360*38fd1498Szrj genericize_break_stmt (tree *stmt_p) 361*38fd1498Szrj { 362*38fd1498Szrj tree label = get_bc_label (bc_break); 363*38fd1498Szrj location_t location = EXPR_LOCATION (*stmt_p); 364*38fd1498Szrj *stmt_p = build1_loc (location, GOTO_EXPR, void_type_node, label); 365*38fd1498Szrj } 366*38fd1498Szrj 367*38fd1498Szrj /* Genericize a OMP_FOR node *STMT_P. 
*/ 368*38fd1498Szrj 369*38fd1498Szrj static void 370*38fd1498Szrj genericize_omp_for_stmt (tree *stmt_p, int *walk_subtrees, void *data) 371*38fd1498Szrj { 372*38fd1498Szrj tree stmt = *stmt_p; 373*38fd1498Szrj location_t locus = EXPR_LOCATION (stmt); 374*38fd1498Szrj tree clab = begin_bc_block (bc_continue, locus); 375*38fd1498Szrj 376*38fd1498Szrj cp_walk_tree (&OMP_FOR_BODY (stmt), cp_genericize_r, data, NULL); 377*38fd1498Szrj if (TREE_CODE (stmt) != OMP_TASKLOOP) 378*38fd1498Szrj cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_genericize_r, data, NULL); 379*38fd1498Szrj cp_walk_tree (&OMP_FOR_INIT (stmt), cp_genericize_r, data, NULL); 380*38fd1498Szrj cp_walk_tree (&OMP_FOR_COND (stmt), cp_genericize_r, data, NULL); 381*38fd1498Szrj cp_walk_tree (&OMP_FOR_INCR (stmt), cp_genericize_r, data, NULL); 382*38fd1498Szrj cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_genericize_r, data, NULL); 383*38fd1498Szrj *walk_subtrees = 0; 384*38fd1498Szrj 385*38fd1498Szrj finish_bc_block (&OMP_FOR_BODY (stmt), bc_continue, clab); 386*38fd1498Szrj } 387*38fd1498Szrj 388*38fd1498Szrj /* Hook into the middle of gimplifying an OMP_FOR node. */ 389*38fd1498Szrj 390*38fd1498Szrj static enum gimplify_status 391*38fd1498Szrj cp_gimplify_omp_for (tree *expr_p, gimple_seq *pre_p) 392*38fd1498Szrj { 393*38fd1498Szrj tree for_stmt = *expr_p; 394*38fd1498Szrj gimple_seq seq = NULL; 395*38fd1498Szrj 396*38fd1498Szrj /* Protect ourselves from recursion. */ 397*38fd1498Szrj if (OMP_FOR_GIMPLIFYING_P (for_stmt)) 398*38fd1498Szrj return GS_UNHANDLED; 399*38fd1498Szrj OMP_FOR_GIMPLIFYING_P (for_stmt) = 1; 400*38fd1498Szrj 401*38fd1498Szrj gimplify_and_add (for_stmt, &seq); 402*38fd1498Szrj gimple_seq_add_seq (pre_p, seq); 403*38fd1498Szrj 404*38fd1498Szrj OMP_FOR_GIMPLIFYING_P (for_stmt) = 0; 405*38fd1498Szrj 406*38fd1498Szrj return GS_ALL_DONE; 407*38fd1498Szrj } 408*38fd1498Szrj 409*38fd1498Szrj /* Gimplify an EXPR_STMT node. 
   */

static void
gimplify_expr_stmt (tree *stmt_p)
{
  tree stmt = EXPR_STMT_EXPR (*stmt_p);

  if (stmt == error_mark_node)
    stmt = NULL;

  /* Gimplification of a statement expression will nullify the
     statement if all its side effects are moved to *PRE_P and *POST_P.

     In this case we will not want to emit the gimplified statement.
     However, we may still want to emit a warning, so we do that before
     gimplification.  */
  if (stmt && warn_unused_value)
    {
      if (!TREE_SIDE_EFFECTS (stmt))
	{
	  if (!IS_EMPTY_STMT (stmt)
	      && !VOID_TYPE_P (TREE_TYPE (stmt))
	      && !TREE_NO_WARNING (stmt))
	    warning (OPT_Wunused_value, "statement with no effect");
	}
      else
	warn_if_unused_value (stmt, input_location);
    }

  /* An erroneous or nullified statement becomes an empty statement list
     rather than NULL, so the caller always gets a valid tree back.  */
  if (stmt == NULL_TREE)
    stmt = alloc_stmt_list ();

  *stmt_p = stmt;
}

/* Gimplify initialization from an AGGR_INIT_EXPR: rewrite the
   AGGR_INIT_EXPR/VEC_INIT_EXPR found on the RHS of *EXPR_P so it
   initializes the LHS slot directly, making the INIT_EXPR redundant.  */

static void
cp_gimplify_init_expr (tree *expr_p)
{
  tree from = TREE_OPERAND (*expr_p, 1);
  tree to = TREE_OPERAND (*expr_p, 0);
  tree t;

  /* What about code that pulls out the temp and uses it elsewhere?  I
     think that such code never uses the TARGET_EXPR as an initializer.  If
     I'm wrong, we'll abort because the temp won't have any RTL.  In that
     case, I guess we'll need to replace references somehow.  */
  if (TREE_CODE (from) == TARGET_EXPR)
    from = TARGET_EXPR_INITIAL (from);

  /* Look through any COMPOUND_EXPRs, since build_compound_expr pushes them
     inside the TARGET_EXPR.  Each iteration looks at the first operand of
     a COMPOUND_EXPR (or the expression itself once the chain ends).  */
  for (t = from; t; )
    {
      tree sub = TREE_CODE (t) == COMPOUND_EXPR ? TREE_OPERAND (t, 0) : t;

      /* If we are initializing from an AGGR_INIT_EXPR, drop the INIT_EXPR and
	 replace the slot operand with our target.

	 Should we add a target parm to gimplify_expr instead?  No, as in this
	 case we want to replace the INIT_EXPR.  */
      if (TREE_CODE (sub) == AGGR_INIT_EXPR
	  || TREE_CODE (sub) == VEC_INIT_EXPR)
	{
	  if (TREE_CODE (sub) == AGGR_INIT_EXPR)
	    AGGR_INIT_EXPR_SLOT (sub) = to;
	  else
	    VEC_INIT_EXPR_SLOT (sub) = to;
	  *expr_p = from;

	  /* The initialization is now a side-effect, so the container can
	     become void.  */
	  if (from != sub)
	    TREE_TYPE (from) = void_type_node;
	}

      /* Handle aggregate NSDMI.  */
      replace_placeholders (sub, to);

      /* T == SUB means we have reached the end of the COMPOUND_EXPR
	 chain; otherwise continue with the second operand.  */
      if (t == sub)
	break;
      else
	t = TREE_OPERAND (t, 1);
    }

}

/* Gimplify a MUST_NOT_THROW_EXPR.
*/ 498*38fd1498Szrj 499*38fd1498Szrj static enum gimplify_status 500*38fd1498Szrj gimplify_must_not_throw_expr (tree *expr_p, gimple_seq *pre_p) 501*38fd1498Szrj { 502*38fd1498Szrj tree stmt = *expr_p; 503*38fd1498Szrj tree temp = voidify_wrapper_expr (stmt, NULL); 504*38fd1498Szrj tree body = TREE_OPERAND (stmt, 0); 505*38fd1498Szrj gimple_seq try_ = NULL; 506*38fd1498Szrj gimple_seq catch_ = NULL; 507*38fd1498Szrj gimple *mnt; 508*38fd1498Szrj 509*38fd1498Szrj gimplify_and_add (body, &try_); 510*38fd1498Szrj mnt = gimple_build_eh_must_not_throw (terminate_fn); 511*38fd1498Szrj gimple_seq_add_stmt_without_update (&catch_, mnt); 512*38fd1498Szrj mnt = gimple_build_try (try_, catch_, GIMPLE_TRY_CATCH); 513*38fd1498Szrj 514*38fd1498Szrj gimple_seq_add_stmt_without_update (pre_p, mnt); 515*38fd1498Szrj if (temp) 516*38fd1498Szrj { 517*38fd1498Szrj *expr_p = temp; 518*38fd1498Szrj return GS_OK; 519*38fd1498Szrj } 520*38fd1498Szrj 521*38fd1498Szrj *expr_p = NULL; 522*38fd1498Szrj return GS_ALL_DONE; 523*38fd1498Szrj } 524*38fd1498Szrj 525*38fd1498Szrj /* Return TRUE if an operand (OP) of a given TYPE being copied is 526*38fd1498Szrj really just an empty class copy. 527*38fd1498Szrj 528*38fd1498Szrj Check that the operand has a simple form so that TARGET_EXPRs and 529*38fd1498Szrj non-empty CONSTRUCTORs get reduced properly, and we leave the 530*38fd1498Szrj return slot optimization alone because it isn't a copy. 
*/ 531*38fd1498Szrj 532*38fd1498Szrj static bool 533*38fd1498Szrj simple_empty_class_p (tree type, tree op) 534*38fd1498Szrj { 535*38fd1498Szrj return 536*38fd1498Szrj ((TREE_CODE (op) == COMPOUND_EXPR 537*38fd1498Szrj && simple_empty_class_p (type, TREE_OPERAND (op, 1))) 538*38fd1498Szrj || TREE_CODE (op) == EMPTY_CLASS_EXPR 539*38fd1498Szrj || is_gimple_lvalue (op) 540*38fd1498Szrj || INDIRECT_REF_P (op) 541*38fd1498Szrj || (TREE_CODE (op) == CONSTRUCTOR 542*38fd1498Szrj && CONSTRUCTOR_NELTS (op) == 0 543*38fd1498Szrj && !TREE_CLOBBER_P (op)) 544*38fd1498Szrj || (TREE_CODE (op) == CALL_EXPR 545*38fd1498Szrj && !CALL_EXPR_RETURN_SLOT_OPT (op))) 546*38fd1498Szrj && is_really_empty_class (type); 547*38fd1498Szrj } 548*38fd1498Szrj 549*38fd1498Szrj /* Returns true if evaluating E as an lvalue has side-effects; 550*38fd1498Szrj specifically, a volatile lvalue has TREE_SIDE_EFFECTS, but it doesn't really 551*38fd1498Szrj have side-effects until there is a read or write through it. */ 552*38fd1498Szrj 553*38fd1498Szrj static bool 554*38fd1498Szrj lvalue_has_side_effects (tree e) 555*38fd1498Szrj { 556*38fd1498Szrj if (!TREE_SIDE_EFFECTS (e)) 557*38fd1498Szrj return false; 558*38fd1498Szrj while (handled_component_p (e)) 559*38fd1498Szrj { 560*38fd1498Szrj if (TREE_CODE (e) == ARRAY_REF 561*38fd1498Szrj && TREE_SIDE_EFFECTS (TREE_OPERAND (e, 1))) 562*38fd1498Szrj return true; 563*38fd1498Szrj e = TREE_OPERAND (e, 0); 564*38fd1498Szrj } 565*38fd1498Szrj if (DECL_P (e)) 566*38fd1498Szrj /* Just naming a variable has no side-effects. */ 567*38fd1498Szrj return false; 568*38fd1498Szrj else if (INDIRECT_REF_P (e)) 569*38fd1498Szrj /* Similarly, indirection has no side-effects. */ 570*38fd1498Szrj return TREE_SIDE_EFFECTS (TREE_OPERAND (e, 0)); 571*38fd1498Szrj else 572*38fd1498Szrj /* For anything else, trust TREE_SIDE_EFFECTS. */ 573*38fd1498Szrj return TREE_SIDE_EFFECTS (e); 574*38fd1498Szrj } 575*38fd1498Szrj 576*38fd1498Szrj /* Do C++-specific gimplification. 
   Args are as for gimplify_expr.  */

int
cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  int saved_stmts_are_full_exprs_p = 0;
  location_t loc = EXPR_LOC_OR_LOC (*expr_p, input_location);
  enum tree_code code = TREE_CODE (*expr_p);
  enum gimplify_status ret;

  /* Statement nodes carry their own full-expression flag; save the
     current setting and install the statement's own for the duration
     of this call (restored before returning below).  */
  if (STATEMENT_CODE_P (code))
    {
      saved_stmts_are_full_exprs_p = stmts_are_full_exprs_p ();
      current_stmt_tree ()->stmts_are_full_exprs_p
	= STMT_IS_FULL_EXPR_P (*expr_p);
    }

  switch (code)
    {
    case AGGR_INIT_EXPR:
      simplify_aggr_init_expr (expr_p);
      ret = GS_OK;
      break;

    case VEC_INIT_EXPR:
      {
	/* NOTE: this LOC intentionally shadows the outer one; it saves
	   input_location so it can be restored after build_vec_init.  */
	location_t loc = input_location;
	tree init = VEC_INIT_EXPR_INIT (*expr_p);
	int from_array = (init && TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
	gcc_assert (EXPR_HAS_LOCATION (*expr_p));
	input_location = EXPR_LOCATION (*expr_p);
	*expr_p = build_vec_init (VEC_INIT_EXPR_SLOT (*expr_p), NULL_TREE,
				  init, VEC_INIT_EXPR_VALUE_INIT (*expr_p),
				  from_array,
				  tf_warning_or_error);
	/* Fold and genericize the freshly built initialization code.  */
	hash_set<tree> pset;
	cp_walk_tree (expr_p, cp_fold_r, &pset, NULL);
	cp_genericize_tree (expr_p, false);
	ret = GS_OK;
	input_location = loc;
      }
      break;

    case THROW_EXPR:
      /* FIXME communicate throw type to back end, probably by moving
	 THROW_EXPR into ../tree.def.  */
      *expr_p = TREE_OPERAND (*expr_p, 0);
      ret = GS_OK;
      break;

    case MUST_NOT_THROW_EXPR:
      ret = gimplify_must_not_throw_expr (expr_p, pre_p);
      break;

      /* We used to do this for MODIFY_EXPR as well, but that's unsafe; the
	 LHS of an assignment might also be involved in the RHS, as in bug
	 25979.  */
    case INIT_EXPR:
      cp_gimplify_init_expr (expr_p);
      if (TREE_CODE (*expr_p) != INIT_EXPR)
	return GS_OK;
      /* Fall through.  */
    case MODIFY_EXPR:
    modify_expr_case:
      {
	/* If the back end isn't clever enough to know that the lhs and rhs
	   types are the same, add an explicit conversion.  */
	tree op0 = TREE_OPERAND (*expr_p, 0);
	tree op1 = TREE_OPERAND (*expr_p, 1);

	if (!error_operand_p (op0)
	    && !error_operand_p (op1)
	    && (TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op0))
		|| TYPE_STRUCTURAL_EQUALITY_P (TREE_TYPE (op1)))
	    && !useless_type_conversion_p (TREE_TYPE (op1), TREE_TYPE (op0)))
	  TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
					      TREE_TYPE (op0), op1);

	else if (simple_empty_class_p (TREE_TYPE (op0), op1))
	  {
	    /* Remove any copies of empty classes.  Also drop volatile
	       variables on the RHS to avoid infinite recursion from
	       gimplify_expr trying to load the value.  */
	    if (TREE_SIDE_EFFECTS (op1))
	      {
		if (TREE_THIS_VOLATILE (op1)
		    && (REFERENCE_CLASS_P (op1) || DECL_P (op1)))
		  op1 = build_fold_addr_expr (op1);

		gimplify_and_add (op1, pre_p);
	      }
	    gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
	    *expr_p = TREE_OPERAND (*expr_p, 0);
	  }
	/* P0145 says that the RHS is sequenced before the LHS.
	   gimplify_modify_expr gimplifies the RHS before the LHS, but that
	   isn't quite strong enough in two cases:

	   1) gimplify.c wants to leave a CALL_EXPR on the RHS, which would
	   mean it's evaluated after the LHS.

	   2) the value calculation of the RHS is also sequenced before the
	   LHS, so for scalar assignment we need to preevaluate if the
	   RHS could be affected by LHS side-effects even if it has no
	   side-effects of its own.  We don't need this for classes because
	   class assignment takes its RHS by reference.  */
	else if (flag_strong_eval_order > 1
		 && TREE_CODE (*expr_p) == MODIFY_EXPR
		 && lvalue_has_side_effects (op0)
		 && (TREE_CODE (op1) == CALL_EXPR
		     || (SCALAR_TYPE_P (TREE_TYPE (op1))
			 && !TREE_CONSTANT (op1))))
	  TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (op1, pre_p);
      }
      ret = GS_OK;
      break;

    case EMPTY_CLASS_EXPR:
      /* We create an empty CONSTRUCTOR with RECORD_TYPE.  */
      *expr_p = build_constructor (TREE_TYPE (*expr_p), NULL);
      ret = GS_OK;
      break;

    case BASELINK:
      *expr_p = BASELINK_FUNCTIONS (*expr_p);
      ret = GS_OK;
      break;

    case TRY_BLOCK:
      genericize_try_block (expr_p);
      ret = GS_OK;
      break;

    case HANDLER:
      genericize_catch_block (expr_p);
      ret = GS_OK;
      break;

    case EH_SPEC_BLOCK:
      genericize_eh_spec_block (expr_p);
      ret = GS_OK;
      break;

    case USING_STMT:
      gcc_unreachable ();

      /* These statement forms are handled during genericization and
	 must not survive to gimplification.  */
    case FOR_STMT:
    case WHILE_STMT:
    case DO_STMT:
    case SWITCH_STMT:
    case CONTINUE_STMT:
    case BREAK_STMT:
      gcc_unreachable ();

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
    case OMP_TASKLOOP:
      ret = cp_gimplify_omp_for (expr_p, pre_p);
      break;

    case EXPR_STMT:
      gimplify_expr_stmt (expr_p);
      ret = GS_OK;
      break;

    case UNARY_PLUS_EXPR:
      {
	tree arg = TREE_OPERAND (*expr_p, 0);
	tree type = TREE_TYPE (*expr_p);
	*expr_p = (TREE_TYPE (arg) != type) ? fold_convert (type, arg)
					    : arg;
	ret = GS_OK;
      }
      break;

    case CALL_EXPR:
      ret = GS_OK;
      if (!CALL_EXPR_FN (*expr_p))
	/* Internal function call.  */;
      else if (CALL_EXPR_REVERSE_ARGS (*expr_p))
	{
	  /* This is a call to a (compound) assignment operator that used
	     the operator syntax; gimplify the RHS first.  */
	  gcc_assert (call_expr_nargs (*expr_p) == 2);
	  gcc_assert (!CALL_EXPR_ORDERED_ARGS (*expr_p));
	  enum gimplify_status t
	    = gimplify_arg (&CALL_EXPR_ARG (*expr_p, 1), pre_p, loc);
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	}
      else if (CALL_EXPR_ORDERED_ARGS (*expr_p))
	{
	  /* Leave the last argument for gimplify_call_expr, to avoid problems
	     with __builtin_va_arg_pack().  */
	  int nargs = call_expr_nargs (*expr_p) - 1;
	  for (int i = 0; i < nargs; ++i)
	    {
	      enum gimplify_status t
		= gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p, loc);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      else if (flag_strong_eval_order
	       && !CALL_EXPR_OPERATOR_SYNTAX (*expr_p))
	{
	  /* If flag_strong_eval_order, evaluate the object argument first.  */
	  tree fntype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
	  if (POINTER_TYPE_P (fntype))
	    fntype = TREE_TYPE (fntype);
	  if (TREE_CODE (fntype) == METHOD_TYPE)
	    {
	      enum gimplify_status t
		= gimplify_arg (&CALL_EXPR_ARG (*expr_p, 0), pre_p, loc);
	      if (t == GS_ERROR)
		ret = GS_ERROR;
	    }
	}
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (*expr_p, 0)
	  && (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == INIT_EXPR
	      || TREE_CODE (TREE_OPERAND (*expr_p, 0)) == MODIFY_EXPR))
	{
	  expr_p = &TREE_OPERAND (*expr_p, 0);
	  code = TREE_CODE (*expr_p);
	  /* Avoid going through the INIT_EXPR case, which can
	     degrade INIT_EXPRs into AGGR_INIT_EXPRs.  */
	  goto modify_expr_case;
	}
      /* Fall through.  */

    default:
      ret = (enum gimplify_status) c_gimplify_expr (expr_p, pre_p, post_p);
      break;
    }

  /* Restore saved state.  */
  if (STATEMENT_CODE_P (code))
    current_stmt_tree ()->stmts_are_full_exprs_p
      = saved_stmts_are_full_exprs_p;

  return ret;
}

/* Return true if T is a PARM_DECL or RESULT_DECL flagged as passed or
   returned by invisible reference (DECL_BY_REFERENCE).  */

static inline bool
is_invisiref_parm (const_tree t)
{
  return ((TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
	  && DECL_BY_REFERENCE (t));
}

/* Return true if the uid in both int tree maps are equal.
*/

bool
cxx_int_tree_map_hasher::equal (cxx_int_tree_map *a, cxx_int_tree_map *b)
{
  /* Two map entries are the same entry iff they carry the same DECL_UID;
     the TO field is payload, not identity.  */
  return (a->uid == b->uid);
}

/* Hash a UID in a cxx_int_tree_map.  */

unsigned int
cxx_int_tree_map_hasher::hash (cxx_int_tree_map *item)
{
  /* The UID itself is the hash value (it is what ::equal compares).  */
  return item->uid;
}

/* A stable comparison routine for use with splay trees and DECLs.
   Comparing by DECL_UID rather than by pointer keeps the ordering
   deterministic across runs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  /* NOTE(review): assumes DECL_UIDs stay small enough that this
     subtraction cannot overflow -- confirm against how UIDs are
     allocated.  */
  return DECL_UID (a) - DECL_UID (b);
}

/* OpenMP context during genericization.  One of these is pushed for
   each parallel/task region while walking its body.  */

struct cp_genericize_omp_taskreg
{
  /* True for OMP_PARALLEL regions, false for task regions.  */
  bool is_parallel;
  /* True if the region's data-sharing default is `shared'.  */
  bool default_shared;
  /* Enclosing region's context, or NULL at the outermost level.  */
  struct cp_genericize_omp_taskreg *outer;
  /* Map from DECL (keyed by DECL_UID) to its OMP_CLAUSE_DEFAULT_* kind
     within this region.  */
  splay_tree variables;
};

/* Return true if genericization should try to determine if
   DECL is firstprivate or shared within task regions.
*/

static bool
omp_var_to_track (tree decl)
{
  /* Strip the invisible-reference wrapper (or an ordinary reference) and
     any array dimensions to reach the underlying element type.  */
  tree type = TREE_TYPE (decl);
  if (is_invisiref_parm (decl))
    type = TREE_TYPE (type);
  else if (TREE_CODE (type) == REFERENCE_TYPE)
    type = TREE_TYPE (type);
  while (TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);
  /* Only class-type variables are worth tracking (they may need copy
     ctor/dtor instantiation for implicit firstprivate).  */
  if (type == error_mark_node || !CLASS_TYPE_P (type))
    return false;
  /* Thread-local variables are never tracked.  */
  if (VAR_P (decl) && CP_DECL_THREAD_LOCAL_P (decl))
    return false;
  /* Skip variables whose sharing is already predetermined by the FE.  */
  if (cxx_omp_predetermined_sharing (decl) != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    return false;
  return true;
}

/* Note DECL use in OpenMP region OMP_CTX during genericization.  On the
   first use within a region, decide whether DECL will be implicitly
   shared or firstprivate there and record that in OMP_CTX->variables;
   if firstprivate, eagerly instantiate the copy ctor and dtor.  */

static void
omp_cxx_notice_variable (struct cp_genericize_omp_taskreg *omp_ctx, tree decl)
{
  splay_tree_node n = splay_tree_lookup (omp_ctx->variables,
					 (splay_tree_key) decl);
  if (n == NULL)
    {
      int flags = OMP_CLAUSE_DEFAULT_SHARED;
      /* Propagate the use to enclosing regions first.  */
      if (omp_ctx->outer)
	omp_cxx_notice_variable (omp_ctx->outer, decl);
      if (!omp_ctx->default_shared)
	{
	  struct cp_genericize_omp_taskreg *octx;

	  /* Scan outward up to (and including) the nearest parallel
	     region: if some enclosing region made DECL non-shared,
	     it becomes firstprivate here.  */
	  for (octx = omp_ctx->outer; octx; octx = octx->outer)
	    {
	      n = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      if (n && n->value != OMP_CLAUSE_DEFAULT_SHARED)
		{
		  flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
		  break;
		}
	      if (octx->is_parallel)
		break;
	    }
	  /* OCTX == NULL here means the scan ran off the outermost
	     region without hitting a parallel: locals and parms of the
	     current function default to firstprivate in a task.  */
	  if (octx == NULL
	      && (TREE_CODE (decl) == PARM_DECL
		  || (!(TREE_STATIC (decl) || DECL_EXTERNAL (decl))
		      && DECL_CONTEXT (decl) == current_function_decl)))
	    flags = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
	  if (flags == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
	    {
	      /* DECL is implicitly determined firstprivate in
		 the current task construct.  Ensure copy ctor and
		 dtor are instantiated, because during gimplification
		 it will be already too late.  */
	      tree type = TREE_TYPE (decl);
	      if (is_invisiref_parm (decl))
		type = TREE_TYPE (type);
	      else if (TREE_CODE (type) == REFERENCE_TYPE)
		type = TREE_TYPE (type);
	      while (TREE_CODE (type) == ARRAY_TYPE)
		type = TREE_TYPE (type);
	      get_copy_ctor (type, tf_none);
	      get_dtor (type, tf_none);
	    }
	}
      splay_tree_insert (omp_ctx->variables, (splay_tree_key) decl, flags);
    }
}

/* Genericization context, threaded through cp_genericize_r as DATA.  */

struct cp_genericize_data
{
  /* Trees already visited; used to avoid walking the same tree twice.  */
  hash_set<tree> *p_set;
  /* Stack of enclosing BIND_EXPRs, consulted by USING_STMT handling.  */
  vec<tree> bind_expr_stack;
  /* Innermost OpenMP parallel/task context, or NULL.  */
  struct cp_genericize_omp_taskreg *omp_ctx;
  /* Innermost enclosing TRY_BLOCK or MUST_NOT_THROW_EXPR, for the
     -Wterminate diagnostic on THROW_EXPR.  */
  tree try_block;
  /* True while walking trees that must not be sanitized, e.g. static
     initializers.  */
  bool no_sanitize_p;
  /* True if invisible reference parms should be dereferenced.  */
  bool handle_invisiref_parm_p;
};

/* Perform any pre-gimplification folding of C++ front end trees to
   GENERIC.
   Note: The folding of non-omp cases is something to move into
   the middle-end.
   As for now we have most foldings only on GENERIC
   in fold-const, we need to perform this before transformation to
   GIMPLE-form.  */

static tree
cp_fold_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt;
  enum tree_code code;

  /* Fold the node in place; DATA is the hash_set<tree> of already-folded
     trees passed in by cp_fold_function.  */
  *stmt_p = stmt = cp_fold (*stmt_p);

  if (((hash_set<tree> *) data)->add (stmt))
    {
      /* Don't walk subtrees of stmts we've already walked once, otherwise
	 we can have exponential complexity with e.g. lots of nested
	 SAVE_EXPRs or TARGET_EXPRs.  cp_fold uses a cache and will return
	 always the same tree, which the first time cp_fold_r has been
	 called on it had the subtrees walked.  */
      *walk_subtrees = 0;
      return NULL;
    }

  code = TREE_CODE (stmt);
  if (code == OMP_FOR || code == OMP_SIMD || code == OMP_DISTRIBUTE
      || code == OMP_TASKLOOP || code == OACC_LOOP)
    {
      /* For OMP loops, walk the pieces explicitly so that the iteration
	 variables themselves are left unfolded, and suppress the generic
	 subtree walk at the end.  */
      tree x;
      int i, n;

      cp_walk_tree (&OMP_FOR_BODY (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_CLAUSES (stmt), cp_fold_r, data, NULL);
      cp_walk_tree (&OMP_FOR_INIT (stmt), cp_fold_r, data, NULL);
      x = OMP_FOR_COND (stmt);
      if (x && TREE_CODE_CLASS (TREE_CODE (x)) == tcc_comparison)
	{
	  cp_walk_tree (&TREE_OPERAND (x, 0), cp_fold_r, data, NULL);
	  cp_walk_tree (&TREE_OPERAND (x, 1), cp_fold_r, data, NULL);
	}
      else if (x && TREE_CODE (x) == TREE_VEC)
	{
	  /* Collapsed loop: one comparison per level.  NOTE(review): only
	     operand 1 (the bound) is walked here; presumably operand 0 is
	     the iteration variable already handled via OMP_FOR_INIT --
	     confirm.  */
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      if (o && TREE_CODE_CLASS (TREE_CODE (o)) == tcc_comparison)
		cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
	    }
	}
      x = OMP_FOR_INCR (stmt);
      if (x && TREE_CODE (x) == TREE_VEC)
	{
	  n = TREE_VEC_LENGTH (x);
	  for (i = 0; i < n; i++)
	    {
	      tree o = TREE_VEC_ELT (x, i);
	      /* Each increment is IV = IV +/- STEP; fold inside the RHS.  */
	      if (o && TREE_CODE (o) == MODIFY_EXPR)
		o = TREE_OPERAND (o, 1);
	      if (o && (TREE_CODE (o) == PLUS_EXPR || TREE_CODE (o) == MINUS_EXPR
			|| TREE_CODE (o) == POINTER_PLUS_EXPR))
		{
		  cp_walk_tree (&TREE_OPERAND (o, 0), cp_fold_r, data, NULL);
		  cp_walk_tree (&TREE_OPERAND (o, 1), cp_fold_r, data, NULL);
		}
	    }
	}
      cp_walk_tree (&OMP_FOR_PRE_BODY (stmt), cp_fold_r, data, NULL);
      *walk_subtrees = 0;
    }

  return NULL;
}

/* Fold ALL the trees!  FIXME we should be able to remove this, but
   apparently that still causes optimization regressions.  */

void
cp_fold_function (tree fndecl)
{
  hash_set<tree> pset;
  cp_walk_tree (&DECL_SAVED_TREE (fndecl), cp_fold_r, &pset, NULL);
}

/* Perform any pre-gimplification lowering of C++ front end trees to
   GENERIC.
*/

/* walk_tree callback; DATA is a struct cp_genericize_data.  Rewrites
   C++-specific tree codes into GENERIC equivalents, dereferences
   invisible reference parms, tracks OpenMP data sharing, and hooks in
   ubsan instrumentation.  Returns NULL to continue the walk.  */

static tree
cp_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  tree stmt = *stmt_p;
  struct cp_genericize_data *wtd = (struct cp_genericize_data *) data;
  hash_set<tree> *p_set = wtd->p_set;

  /* If in an OpenMP context, note var uses.  */
  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
      && (VAR_P (stmt)
	  || TREE_CODE (stmt) == PARM_DECL
	  || TREE_CODE (stmt) == RESULT_DECL)
      && omp_var_to_track (stmt))
    omp_cxx_notice_variable (wtd->omp_ctx, stmt);

  /* Don't dereference parms in a thunk, pass the references through.  */
  if ((TREE_CODE (stmt) == CALL_EXPR && CALL_FROM_THUNK_P (stmt))
      || (TREE_CODE (stmt) == AGGR_INIT_EXPR && AGGR_INIT_FROM_THUNK_P (stmt)))
    {
      *walk_subtrees = 0;
      return NULL;
    }

  /* Dereference invisible reference parms.  */
  if (wtd->handle_invisiref_parm_p && is_invisiref_parm (stmt))
    {
      *stmt_p = convert_from_reference (stmt);
      p_set->add (*stmt_p);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Map block scope extern declarations to visible declarations with the
     same name and type in outer scopes if any.  */
  if (cp_function_chain->extern_decl_map
      && VAR_OR_FUNCTION_DECL_P (stmt)
      && DECL_EXTERNAL (stmt))
    {
      struct cxx_int_tree_map *h, in;
      in.uid = DECL_UID (stmt);
      h = cp_function_chain->extern_decl_map->find_with_hash (&in, in.uid);
      if (h)
	{
	  *stmt_p = h->to;
	  *walk_subtrees = 0;
	  return NULL;
	}
    }

  /* Instrument reference-typed INTEGER_CSTs (i.e. references bound to
     constants) for -fsanitize=null/alignment, unless sanitization is
     suppressed here.  */
  if (TREE_CODE (stmt) == INTEGER_CST
      && TREE_CODE (TREE_TYPE (stmt)) == REFERENCE_TYPE
      && (flag_sanitize & (SANITIZE_NULL | SANITIZE_ALIGNMENT))
      && !wtd->no_sanitize_p)
    {
      ubsan_maybe_instrument_reference (stmt_p);
      if (*stmt_p != stmt)
	{
	  *walk_subtrees = 0;
	  return NULL_TREE;
	}
    }

  /* Other than invisiref parms, don't walk the same tree twice.  */
  if (p_set->contains (stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  switch (TREE_CODE (stmt))
    {
    case ADDR_EXPR:
      if (is_invisiref_parm (TREE_OPERAND (stmt, 0)))
	{
	  /* If in an OpenMP context, note var uses.  */
	  if (__builtin_expect (wtd->omp_ctx != NULL, 0)
	      && omp_var_to_track (TREE_OPERAND (stmt, 0)))
	    omp_cxx_notice_variable (wtd->omp_ctx, TREE_OPERAND (stmt, 0));
	  /* &invisiref-parm is just the parm itself (already a pointer).  */
	  *stmt_p = fold_convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
	  *walk_subtrees = 0;
	}
      break;

    case RETURN_EXPR:
      if (TREE_OPERAND (stmt, 0) && is_invisiref_parm (TREE_OPERAND (stmt, 0)))
	/* Don't dereference an invisiref RESULT_DECL inside a RETURN_EXPR.  */
	*walk_subtrees = 0;
      break;

    case OMP_CLAUSE:
      switch (OMP_CLAUSE_CODE (stmt))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    {
	      *walk_subtrees = 0;
	      if (OMP_CLAUSE_LASTPRIVATE_STMT (stmt))
		cp_walk_tree (&OMP_CLAUSE_LASTPRIVATE_STMT (stmt),
			      cp_genericize_r, data, NULL);
	    }
	  break;
	case OMP_CLAUSE_PRIVATE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    *walk_subtrees = 0;
	  else if (wtd->omp_ctx != NULL)
	    {
	      /* Private clause doesn't cause any references to the
		 var in outer contexts, avoid calling
		 omp_cxx_notice_variable for it.  */
	      struct cp_genericize_omp_taskreg *old = wtd->omp_ctx;
	      wtd->omp_ctx = NULL;
	      cp_walk_tree (&OMP_CLAUSE_DECL (stmt), cp_genericize_r,
			    data, NULL);
	      wtd->omp_ctx = old;
	      *walk_subtrees = 0;
	    }
	  break;
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	  /* Don't dereference an invisiref in OpenMP clauses.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    *walk_subtrees = 0;
	  break;
	case OMP_CLAUSE_REDUCTION:
	  /* Don't dereference an invisiref in reduction clause's
	     OMP_CLAUSE_DECL either.  OMP_CLAUSE_REDUCTION_{INIT,MERGE}
	     still needs to be genericized.  */
	  if (is_invisiref_parm (OMP_CLAUSE_DECL (stmt)))
	    {
	      *walk_subtrees = 0;
	      if (OMP_CLAUSE_REDUCTION_INIT (stmt))
		cp_walk_tree (&OMP_CLAUSE_REDUCTION_INIT (stmt),
			      cp_genericize_r, data, NULL);
	      if (OMP_CLAUSE_REDUCTION_MERGE (stmt))
		cp_walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (stmt),
			      cp_genericize_r, data, NULL);
	    }
	  break;
	default:
	  break;
	}
      break;

    /* Due to the way voidify_wrapper_expr is written, we don't get a chance
       to lower this construct before scanning it, so we need to lower these
       before doing anything else.  */
    case CLEANUP_STMT:
      *stmt_p = build2_loc (EXPR_LOCATION (stmt),
			    CLEANUP_EH_ONLY (stmt) ? TRY_CATCH_EXPR
						   : TRY_FINALLY_EXPR,
			    void_type_node,
			    CLEANUP_BODY (stmt),
			    CLEANUP_EXPR (stmt));
      break;

    case IF_STMT:
      genericize_if_stmt (stmt_p);
      /* *stmt_p has changed, tail recurse to handle it again.  */
      return cp_genericize_r (stmt_p, walk_subtrees, data);

    /* COND_EXPR might have incompatible types in branches if one or both
       arms are bitfields.  Fix it up now.  */
    case COND_EXPR:
      {
	tree type_left
	  = (TREE_OPERAND (stmt, 1)
	     ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 1))
	     : NULL_TREE);
	tree type_right
	  = (TREE_OPERAND (stmt, 2)
	     ? is_bitfield_expr_with_lowered_type (TREE_OPERAND (stmt, 2))
	     : NULL_TREE);
	if (type_left
	    && !useless_type_conversion_p (TREE_TYPE (stmt),
					   TREE_TYPE (TREE_OPERAND (stmt, 1))))
	  {
	    TREE_OPERAND (stmt, 1)
	      = fold_convert (type_left, TREE_OPERAND (stmt, 1));
	    gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						   type_left));
	  }
	if (type_right
	    && !useless_type_conversion_p (TREE_TYPE (stmt),
					   TREE_TYPE (TREE_OPERAND (stmt, 2))))
	  {
	    TREE_OPERAND (stmt, 2)
	      = fold_convert (type_right, TREE_OPERAND (stmt, 2));
	    gcc_assert (useless_type_conversion_p (TREE_TYPE (stmt),
						   type_right));
	  }
      }
      break;

    case BIND_EXPR:
      if (__builtin_expect (wtd->omp_ctx != NULL, 0))
	{
	  /* Record block-local class-type vars in the innermost OpenMP
	     region: statics default to shared, automatics to private.  */
	  tree decl;
	  for (decl = BIND_EXPR_VARS (stmt); decl; decl = DECL_CHAIN (decl))
	    if (VAR_P (decl)
		&& !DECL_EXTERNAL (decl)
		&& omp_var_to_track (decl))
	      {
		splay_tree_node n
		  = splay_tree_lookup (wtd->omp_ctx->variables,
				       (splay_tree_key) decl);
		if (n == NULL)
		  splay_tree_insert (wtd->omp_ctx->variables,
				     (splay_tree_key) decl,
				     TREE_STATIC (decl)
				     ? OMP_CLAUSE_DEFAULT_SHARED
				     : OMP_CLAUSE_DEFAULT_PRIVATE);
	      }
	}
      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT | SANITIZE_VPTR))
	{
	  /* The point here is to not sanitize static initializers.  */
	  bool no_sanitize_p = wtd->no_sanitize_p;
	  wtd->no_sanitize_p = true;
	  for (tree decl = BIND_EXPR_VARS (stmt);
	       decl;
	       decl = DECL_CHAIN (decl))
	    if (VAR_P (decl)
		&& TREE_STATIC (decl)
		&& DECL_INITIAL (decl))
	      cp_walk_tree (&DECL_INITIAL (decl), cp_genericize_r, data, NULL);
	  wtd->no_sanitize_p = no_sanitize_p;
	}
      /* Keep the BIND_EXPR stack current for USING_STMT handling below.  */
      wtd->bind_expr_stack.safe_push (stmt);
      cp_walk_tree (&BIND_EXPR_BODY (stmt),
		    cp_genericize_r, data, NULL);
      wtd->bind_expr_stack.pop ();
      break;

    case USING_STMT:
      {
	tree block = NULL_TREE;

	/* Get the innermost enclosing GIMPLE_BIND that has a non NULL
	   BLOCK, and append an IMPORTED_DECL to its
	   BLOCK_VARS chained list.  */
	if (wtd->bind_expr_stack.exists ())
	  {
	    int i;
	    for (i = wtd->bind_expr_stack.length () - 1; i >= 0; i--)
	      if ((block = BIND_EXPR_BLOCK (wtd->bind_expr_stack[i])))
		break;
	  }
	if (block)
	  {
	    tree decl = TREE_OPERAND (stmt, 0);
	    gcc_assert (decl);

	    if (undeduced_auto_decl (decl))
	      /* Omit from the GENERIC, the back-end can't handle it.  */;
	    else
	      {
		tree using_directive = make_node (IMPORTED_DECL);
		TREE_TYPE (using_directive) = void_type_node;

		IMPORTED_DECL_ASSOCIATED_DECL (using_directive) = decl;
		DECL_CHAIN (using_directive) = BLOCK_VARS (block);
		BLOCK_VARS (block) = using_directive;
	      }
	  }
	/* The USING_STMT won't appear in GENERIC.  */
	*stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
	*walk_subtrees = 0;
      }
      break;

    case DECL_EXPR:
      if (TREE_CODE (DECL_EXPR_DECL (stmt)) == USING_DECL)
	{
	  /* Using decls inside DECL_EXPRs are just dropped on the floor.  */
	  *stmt_p = build1 (NOP_EXPR, void_type_node, integer_zero_node);
	  *walk_subtrees = 0;
	}
      else
	{
	  tree d = DECL_EXPR_DECL (stmt);
	  if (VAR_P (d))
	    gcc_assert (CP_DECL_THREAD_LOCAL_P (d) == DECL_THREAD_LOCAL_P (d));
	}
      break;

    case OMP_PARALLEL:
    case OMP_TASK:
    case OMP_TASKLOOP:
      {
	/* Push a fresh OpenMP context for the region, seed it from the
	   explicit clauses, walk the body, then pop and free it.  */
	struct cp_genericize_omp_taskreg omp_ctx;
	tree c, decl;
	splay_tree_node n;

	*walk_subtrees = 0;
	cp_walk_tree (&OMP_CLAUSES (stmt), cp_genericize_r, data, NULL);
	omp_ctx.is_parallel = TREE_CODE (stmt) == OMP_PARALLEL;
	omp_ctx.default_shared = omp_ctx.is_parallel;
	omp_ctx.outer = wtd->omp_ctx;
	omp_ctx.variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
	wtd->omp_ctx = &omp_ctx;
	for (c = OMP_CLAUSES (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	  switch (OMP_CLAUSE_CODE (c))
	    {
	    case OMP_CLAUSE_SHARED:
	    case OMP_CLAUSE_PRIVATE:
	    case OMP_CLAUSE_FIRSTPRIVATE:
	    case OMP_CLAUSE_LASTPRIVATE:
	      decl = OMP_CLAUSE_DECL (c);
	      if (decl == error_mark_node || !omp_var_to_track (decl))
		break;
	      n = splay_tree_lookup (omp_ctx.variables, (splay_tree_key) decl);
	      if (n != NULL)
		break;
	      splay_tree_insert (omp_ctx.variables, (splay_tree_key) decl,
				 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
				 ? OMP_CLAUSE_DEFAULT_SHARED
				 : OMP_CLAUSE_DEFAULT_PRIVATE);
	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE && omp_ctx.outer)
		omp_cxx_notice_variable (omp_ctx.outer, decl);
	      break;
	    case OMP_CLAUSE_DEFAULT:
	      if (OMP_CLAUSE_DEFAULT_KIND (c) == OMP_CLAUSE_DEFAULT_SHARED)
		omp_ctx.default_shared = true;
	      /* Fall through.  */
	    default:
	      break;
	    }
	if (TREE_CODE (stmt) == OMP_TASKLOOP)
	  genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
	else
	  cp_walk_tree (&OMP_BODY (stmt), cp_genericize_r, data, NULL);
	wtd->omp_ctx = omp_ctx.outer;
	splay_tree_delete (omp_ctx.variables);
      }
      break;

    case TRY_BLOCK:
      {
	/* Record the enclosing try for the THROW_EXPR diagnostic; the
	   handlers are walked outside its scope.  */
	*walk_subtrees = 0;
	tree try_block = wtd->try_block;
	wtd->try_block = stmt;
	cp_walk_tree (&TRY_STMTS (stmt), cp_genericize_r, data, NULL);
	wtd->try_block = try_block;
	cp_walk_tree (&TRY_HANDLERS (stmt), cp_genericize_r, data, NULL);
      }
      break;

    case MUST_NOT_THROW_EXPR:
      /* MUST_NOT_THROW_COND might be something else with TM.  */
      if (MUST_NOT_THROW_COND (stmt) == NULL_TREE)
	{
	  *walk_subtrees = 0;
	  tree try_block = wtd->try_block;
	  wtd->try_block = stmt;
	  cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
	  wtd->try_block = try_block;
	}
      break;

    case THROW_EXPR:
      {
	location_t loc = location_of (stmt);
	if (TREE_NO_WARNING (stmt))
	  /* Never mind.  */;
	else if (wtd->try_block)
	  {
	    /* Throwing inside a must-not-throw region always terminates.  */
	    if (TREE_CODE (wtd->try_block) == MUST_NOT_THROW_EXPR
		&& warning_at (loc, OPT_Wterminate,
			       "throw will always call terminate()")
		&& cxx_dialect >= cxx11
		&& DECL_DESTRUCTOR_P (current_function_decl))
	      inform (loc, "in C++11 destructors default to noexcept");
	  }
	else
	  {
	    if (warn_cxx11_compat && cxx_dialect < cxx11
		&& DECL_DESTRUCTOR_P (current_function_decl)
		&& (TYPE_RAISES_EXCEPTIONS (TREE_TYPE (current_function_decl))
		    == NULL_TREE)
		&& (get_defaulted_eh_spec (current_function_decl)
		    == empty_except_spec))
	      warning_at (loc, OPT_Wc__11_compat,
			  "in C++11 this throw will terminate because "
			  "destructors default to noexcept");
	  }
      }
      break;

    case CONVERT_EXPR:
      gcc_assert (!CONVERT_EXPR_VBASE_PATH (stmt));
      break;

    case FOR_STMT:
      genericize_for_stmt (stmt_p, walk_subtrees, data);
      break;

    case WHILE_STMT:
      genericize_while_stmt (stmt_p, walk_subtrees, data);
      break;

    case DO_STMT:
      genericize_do_stmt (stmt_p, walk_subtrees, data);
      break;

    case SWITCH_STMT:
      genericize_switch_stmt (stmt_p, walk_subtrees, data);
      break;

    case CONTINUE_STMT:
      genericize_continue_stmt (stmt_p);
      break;

    case BREAK_STMT:
      genericize_break_stmt (stmt_p);
      break;

    case OMP_FOR:
    case OMP_SIMD:
    case OMP_DISTRIBUTE:
      genericize_omp_for_stmt (stmt_p, walk_subtrees, data);
      break;

    case PTRMEM_CST:
      /* By the time we get here we're handing off to the back end, so we don't
	 need or want to preserve PTRMEM_CST anymore.  */
      *stmt_p = cplus_expand_constant (stmt);
      *walk_subtrees = 0;
      break;

    case MEM_REF:
      /* For MEM_REF, make sure not to sanitize the second operand even
	 if it has reference type.  It is just an offset with a type
	 holding other information.  There is no other processing we
	 need to do for INTEGER_CSTs, so just ignore the second argument
	 unconditionally.  */
      cp_walk_tree (&TREE_OPERAND (stmt, 0), cp_genericize_r, data, NULL);
      *walk_subtrees = 0;
      break;

    case NOP_EXPR:
      if (!wtd->no_sanitize_p
	  && sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT)
	  && TREE_CODE (TREE_TYPE (stmt)) == REFERENCE_TYPE)
	ubsan_maybe_instrument_reference (stmt_p);
      break;

    case CALL_EXPR:
      if (!wtd->no_sanitize_p
	  && sanitize_flags_p ((SANITIZE_NULL
				| SANITIZE_ALIGNMENT | SANITIZE_VPTR)))
	{
	  tree fn = CALL_EXPR_FN (stmt);
	  /* Instrument member calls: null/alignment checks on the object
	     argument, and vptr checks except in constructors.  */
	  if (fn != NULL_TREE
	      && !error_operand_p (fn)
	      && POINTER_TYPE_P (TREE_TYPE (fn))
	      && TREE_CODE (TREE_TYPE (TREE_TYPE (fn))) == METHOD_TYPE)
	    {
	      bool is_ctor
		= TREE_CODE (fn) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
		  && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0));
	      if (sanitize_flags_p (SANITIZE_NULL | SANITIZE_ALIGNMENT))
		ubsan_maybe_instrument_member_call (stmt, is_ctor);
	      if (sanitize_flags_p (SANITIZE_VPTR) && !is_ctor)
		cp_ubsan_maybe_instrument_member_call (stmt);
	    }
	  else if (fn == NULL_TREE
		   && CALL_EXPR_IFN (stmt) == IFN_UBSAN_NULL
		   && TREE_CODE (CALL_EXPR_ARG (stmt, 0)) == INTEGER_CST
		   && (TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (stmt, 0)))
		       == REFERENCE_TYPE))
	    *walk_subtrees = 0;
	}
      /* Fall through.  */
    case AGGR_INIT_EXPR:
      /* For calls to a multi-versioned function, overload resolution
	 returns the function with the highest target priority, that is,
	 the version that will be checked for dispatching first.  If this
	 version is inlinable, a direct call to this version can be made
	 otherwise the call should go through the dispatcher.  */
      {
	tree fn = cp_get_callee_fndecl_nofold (stmt);
	if (fn && DECL_FUNCTION_VERSIONED (fn)
	    && (current_function_decl == NULL
		|| !targetm.target_option.can_inline_p (current_function_decl,
							fn)))
	  if (tree dis = get_function_version_dispatcher (fn))
	    {
	      mark_versions_used (dis);
	      dis = build_address (dis);
	      if (TREE_CODE (stmt) == CALL_EXPR)
		CALL_EXPR_FN (stmt) = dis;
	      else
		AGGR_INIT_EXPR_FN (stmt) = dis;
	    }
      }
      break;

    case TARGET_EXPR:
      if (TARGET_EXPR_INITIAL (stmt)
	  && TREE_CODE (TARGET_EXPR_INITIAL (stmt)) == CONSTRUCTOR
	  && CONSTRUCTOR_PLACEHOLDER_BOUNDARY (TARGET_EXPR_INITIAL (stmt)))
	TARGET_EXPR_NO_ELIDE (stmt) = 1;
      break;

    default:
      if (IS_TYPE_OR_DECL_P (stmt))
	*walk_subtrees = 0;
      break;
    }

  p_set->add (*stmt_p);

  return NULL;
}

/* Lower C++ front end trees to GENERIC in T_P.
*/

/* HANDLE_INVISIREF_PARM_P tells cp_genericize_r (via the walk data)
   whether to rewrite uses of parameters passed by invisible
   reference.  */

static void
cp_genericize_tree (tree* t_p, bool handle_invisiref_parm_p)
{
  struct cp_genericize_data wtd;

  /* Initialize the walk state shared by all cp_genericize_r
     invocations for this tree.  */
  wtd.p_set = new hash_set<tree>;
  wtd.bind_expr_stack.create (0);
  wtd.omp_ctx = NULL;
  wtd.try_block = NULL_TREE;
  wtd.no_sanitize_p = false;
  wtd.handle_invisiref_parm_p = handle_invisiref_parm_p;
  cp_walk_tree (t_p, cp_genericize_r, &wtd, NULL);
  delete wtd.p_set;
  wtd.bind_expr_stack.release ();
  /* vptr sanitization instruments member accesses over the whole
     (now genericized) tree in a second pass.  */
  if (sanitize_flags_p (SANITIZE_VPTR))
    cp_ubsan_instrument_member_accesses (t_p);
}

/* If a non-void function doesn't obviously end with a return
   statement, add ubsan instrumentation code to verify it at runtime.
   If -fsanitize=return is not enabled, instrument
   __builtin_unreachable instead (only when optimizing).  */

static void
cp_maybe_instrument_return (tree fndecl)
{
  /* Nothing to check for void functions, ctors/dtors (which have no
     user-visible return value), or when the target says flowing off
     the end is not a problem for this function.  */
  if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
      || DECL_CONSTRUCTOR_P (fndecl)
      || DECL_DESTRUCTOR_P (fndecl)
      || !targetm.warn_func_return (fndecl))
    return;

  if (!sanitize_flags_p (SANITIZE_RETURN, fndecl)
      /* Don't add __builtin_unreachable () if not optimizing, it will not
	 improve any optimizations in that case, just break UB code.
	 Don't add it if -fsanitize=unreachable -fno-sanitize=return either,
	 UBSan covers this with ubsan_instrument_return above where sufficient
	 information is provided, while the __builtin_unreachable () below
	 if return sanitization is disabled will just result in hard to
	 understand runtime error without location.  */
      && (!optimize
	  || sanitize_flags_p (SANITIZE_UNREACHABLE, fndecl)))
    return;

  /* Walk down the "spine" of the function body to its final
     statement; if that is a RETURN_EXPR, no instrumentation is
     needed.  */
  tree t = DECL_SAVED_TREE (fndecl);
  while (t)
    {
      switch (TREE_CODE (t))
	{
	case BIND_EXPR:
	  t = BIND_EXPR_BODY (t);
	  continue;
	case TRY_FINALLY_EXPR:
	case CLEANUP_POINT_EXPR:
	  t = TREE_OPERAND (t, 0);
	  continue;
	case STATEMENT_LIST:
	  {
	    /* Only the last statement of the list can be the final
	       statement of the function.  */
	    tree_stmt_iterator i = tsi_last (t);
	    if (!tsi_end_p (i))
	      {
		t = tsi_stmt (i);
		continue;
	      }
	  }
	  break;
	case RETURN_EXPR:
	  /* Function already ends in a return; nothing to do.  */
	  return;
	default:
	  break;
	}
      break;
    }
  if (t == NULL_TREE)
    return;
  /* Append the instrumentation to the (outermost bind expr of the)
     function body.  */
  tree *p = &DECL_SAVED_TREE (fndecl);
  if (TREE_CODE (*p) == BIND_EXPR)
    p = &BIND_EXPR_BODY (*p);

  location_t loc = DECL_SOURCE_LOCATION (fndecl);
  if (sanitize_flags_p (SANITIZE_RETURN, fndecl))
    t = ubsan_instrument_return (loc);
  else
    {
      /* NOTE: this inner FNDECL deliberately shadows the parameter;
	 from here on only the builtin decl is needed.  */
      tree fndecl = builtin_decl_explicit (BUILT_IN_UNREACHABLE);
      t = build_call_expr_loc (BUILTINS_LOCATION, fndecl, 0);
    }

  append_to_statement_list (t, p);
}

/* Entry point: lower the C++ trees of FNDECL to GENERIC, fixing up
   invisible-reference parameters and return values first.  */

void
cp_genericize (tree fndecl)
{
  tree t;

  /* Fix up the types of parms passed by invisible reference.  */
  for (t = DECL_ARGUMENTS (fndecl); t; t = DECL_CHAIN (t))
    if (TREE_ADDRESSABLE (TREE_TYPE (t)))
      {
	/* If a function's arguments are copied to create a thunk,
	   then DECL_BY_REFERENCE will be set -- but the type of the
	   argument will be a pointer type, so we will never get
	   here.  */
	gcc_assert (!DECL_BY_REFERENCE (t));
	gcc_assert (DECL_ARG_TYPE (t) != TREE_TYPE (t));
	TREE_TYPE (t) = DECL_ARG_TYPE (t);
	DECL_BY_REFERENCE (t) = 1;
	TREE_ADDRESSABLE (t) = 0;
	relayout_decl (t);
      }

  /* Do the same for the return value.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (fndecl))))
    {
      t = DECL_RESULT (fndecl);
      TREE_TYPE (t) = build_reference_type (TREE_TYPE (t));
      DECL_BY_REFERENCE (t) = 1;
      TREE_ADDRESSABLE (t) = 0;
      relayout_decl (t);
      if (DECL_NAME (t))
	{
	  /* Adjust DECL_VALUE_EXPR of the original var: after the
	     NRV transformation the named variable aliases the
	     (now by-reference) result decl, so its value expr must
	     dereference it.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (VAR_P (var)
		  && DECL_NAME (t) == DECL_NAME (var)
		  && DECL_HAS_VALUE_EXPR_P (var)
		  && DECL_VALUE_EXPR (var) == t)
		{
		  tree val = convert_from_reference (t);
		  SET_DECL_VALUE_EXPR (var, val);
		  break;
		}
	}
    }

  /* If we're a clone, the body is already GIMPLE.  */
  if (DECL_CLONED_FUNCTION_P (fndecl))
    return;

  /* Allow cp_genericize calls to be nested.  */
  tree save_bc_label[2];
  save_bc_label[bc_break] = bc_label[bc_break];
  save_bc_label[bc_continue] = bc_label[bc_continue];
  bc_label[bc_break] = NULL_TREE;
  bc_label[bc_continue] = NULL_TREE;

  /* We do want to see every occurrence of the parms, so we can't just use
     walk_tree's hash functionality.  */
  cp_genericize_tree (&DECL_SAVED_TREE (fndecl), true);

  cp_maybe_instrument_return (fndecl);

  /* Do everything else.  */
  c_genericize (fndecl);

  /* All break/continue labels must have been consumed by the walk.  */
  gcc_assert (bc_label[bc_break] == NULL);
  gcc_assert (bc_label[bc_continue] == NULL);
  bc_label[bc_break] = save_bc_label[bc_break];
  bc_label[bc_continue] = save_bc_label[bc_continue];
}

/* Build code to apply FN to each member of ARG1 and ARG2.
   FN may be
   NULL if there is in fact nothing to do.  ARG2 may be null if FN
   actually only takes one argument.  */

static tree
cxx_omp_clause_apply_fn (tree fn, tree arg1, tree arg2)
{
  tree defparm, parm, t;
  int i = 0;
  int nargs;
  tree *argarray;

  if (fn == NULL)
    return NULL;

  nargs = list_length (DECL_ARGUMENTS (fn));
  argarray = XALLOCAVEC (tree, nargs);

  /* Skip the leading parameter(s) that will be bound to ARG1 (and
     ARG2, if present); the rest, if any, get default arguments.  */
  defparm = TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (fn)));
  if (arg2)
    defparm = TREE_CHAIN (defparm);

  bool is_method = TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE;
  if (TREE_CODE (TREE_TYPE (arg1)) == ARRAY_TYPE)
    {
      /* Array case: emit an explicit loop that applies FN to each
	 (innermost) element.  */
      tree inner_type = TREE_TYPE (arg1);
      tree start1, end1, p1;
      tree start2 = NULL, p2 = NULL;
      tree ret = NULL, lab;

      /* Descend through (possibly nested) array types to a pointer to
	 the first innermost element of each argument.  */
      start1 = arg1;
      start2 = arg2;
      do
	{
	  inner_type = TREE_TYPE (inner_type);
	  start1 = build4 (ARRAY_REF, inner_type, start1,
			   size_zero_node, NULL, NULL);
	  if (arg2)
	    start2 = build4 (ARRAY_REF, inner_type, start2,
			     size_zero_node, NULL, NULL);
	}
      while (TREE_CODE (inner_type) == ARRAY_TYPE);
      start1 = build_fold_addr_expr_loc (input_location, start1);
      if (arg2)
	start2 = build_fold_addr_expr_loc (input_location, start2);

      /* End pointer: one past the last element of ARG1.  */
      end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
      end1 = fold_build_pointer_plus (start1, end1);

      /* P1 (and P2) are the loop induction pointers.  */
      p1 = create_tmp_var (TREE_TYPE (start1));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, start1);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  p2 = create_tmp_var (TREE_TYPE (start2));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, start2);
	  append_to_statement_list (t, &ret);
	}

      /* Loop head label; the back edge is the COND_EXPR jump below.  */
      lab = create_artificial_label (input_location);
      t = build1 (LABEL_EXPR, void_type_node, lab);
      append_to_statement_list (t, &ret);

      argarray[i++] = p1;
      if (arg2)
	argarray[i++] = p2;
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm), fn,
					   i - is_method, tf_warning_or_error);
      /* Emit the per-element call, wrapped in a cleanup point.  */
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      t = fold_build_cleanup_point_expr (TREE_TYPE (t), t);
      append_to_statement_list (t, &ret);

      /* Advance the induction pointer(s) by one element.  */
      t = fold_build_pointer_plus (p1, TYPE_SIZE_UNIT (inner_type));
      t = build2 (MODIFY_EXPR, TREE_TYPE (p1), p1, t);
      append_to_statement_list (t, &ret);

      if (arg2)
	{
	  t = fold_build_pointer_plus (p2, TYPE_SIZE_UNIT (inner_type));
	  t = build2 (MODIFY_EXPR, TREE_TYPE (p2), p2, t);
	  append_to_statement_list (t, &ret);
	}

      /* Loop while P1 has not reached END1.  */
      t = build2 (NE_EXPR, boolean_type_node, p1, end1);
      t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&lab), NULL);
      append_to_statement_list (t, &ret);

      return ret;
    }
  else
    {
      /* Scalar case: a single call on the addresses of the args.  */
      argarray[i++] = build_fold_addr_expr_loc (input_location, arg1);
      if (arg2)
	argarray[i++] = build_fold_addr_expr_loc (input_location, arg2);
      /* Handle default arguments.  */
      for (parm = defparm; parm && parm != void_list_node;
	   parm = TREE_CHAIN (parm), i++)
	argarray[i] = convert_default_arg (TREE_VALUE (parm),
					   TREE_PURPOSE (parm), fn,
					   i - is_method, tf_warning_or_error);
      t = build_call_a (fn, i, argarray);
      t = fold_convert (void_type_node, t);
      return fold_build_cleanup_point_expr (TREE_TYPE (t), t);
    }
}

/* Return code to initialize DECL with its default constructor, or
   NULL if there's nothing to do.  */

tree
cxx_omp_clause_default_ctor (tree clause, tree decl, tree /*outer*/)
{
  /* CP_OMP_CLAUSE_INFO element 0 holds the default ctor (see the
     copy-ctor/dtor accessors below for elements 0/2/1).  */
  tree info = CP_OMP_CLAUSE_INFO (clause);
  tree ret = NULL;

  if (info)
    ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), decl, NULL);

  return ret;
}

/* Return code to initialize DST with a copy constructor from SRC.
*/ 1859*38fd1498Szrj 1860*38fd1498Szrj tree 1861*38fd1498Szrj cxx_omp_clause_copy_ctor (tree clause, tree dst, tree src) 1862*38fd1498Szrj { 1863*38fd1498Szrj tree info = CP_OMP_CLAUSE_INFO (clause); 1864*38fd1498Szrj tree ret = NULL; 1865*38fd1498Szrj 1866*38fd1498Szrj if (info) 1867*38fd1498Szrj ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 0), dst, src); 1868*38fd1498Szrj if (ret == NULL) 1869*38fd1498Szrj ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src); 1870*38fd1498Szrj 1871*38fd1498Szrj return ret; 1872*38fd1498Szrj } 1873*38fd1498Szrj 1874*38fd1498Szrj /* Similarly, except use an assignment operator instead. */ 1875*38fd1498Szrj 1876*38fd1498Szrj tree 1877*38fd1498Szrj cxx_omp_clause_assign_op (tree clause, tree dst, tree src) 1878*38fd1498Szrj { 1879*38fd1498Szrj tree info = CP_OMP_CLAUSE_INFO (clause); 1880*38fd1498Szrj tree ret = NULL; 1881*38fd1498Szrj 1882*38fd1498Szrj if (info) 1883*38fd1498Szrj ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 2), dst, src); 1884*38fd1498Szrj if (ret == NULL) 1885*38fd1498Szrj ret = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src); 1886*38fd1498Szrj 1887*38fd1498Szrj return ret; 1888*38fd1498Szrj } 1889*38fd1498Szrj 1890*38fd1498Szrj /* Return code to destroy DECL. */ 1891*38fd1498Szrj 1892*38fd1498Szrj tree 1893*38fd1498Szrj cxx_omp_clause_dtor (tree clause, tree decl) 1894*38fd1498Szrj { 1895*38fd1498Szrj tree info = CP_OMP_CLAUSE_INFO (clause); 1896*38fd1498Szrj tree ret = NULL; 1897*38fd1498Szrj 1898*38fd1498Szrj if (info) 1899*38fd1498Szrj ret = cxx_omp_clause_apply_fn (TREE_VEC_ELT (info, 1), decl, NULL); 1900*38fd1498Szrj 1901*38fd1498Szrj return ret; 1902*38fd1498Szrj } 1903*38fd1498Szrj 1904*38fd1498Szrj /* True if OpenMP should privatize what this DECL points to rather 1905*38fd1498Szrj than the DECL itself. 
*/

bool
cxx_omp_privatize_by_reference (const_tree decl)
{
  /* References and invisible-reference parms are accessed through a
     pointer, so OpenMP must privatize the pointed-to object.  */
  return (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
	  || is_invisiref_parm (decl));
}

/* Return true if DECL is const qualified var having no mutable member.  */
bool
cxx_omp_const_qual_no_mutable (tree decl)
{
  tree type = TREE_TYPE (decl);
  if (TREE_CODE (type) == REFERENCE_TYPE)
    {
      /* Only look through the reference for invisiref parms; other
	 references are not privatized by reference here.  */
      if (!is_invisiref_parm (decl))
	return false;
      type = TREE_TYPE (type);

      if (TREE_CODE (decl) == RESULT_DECL && DECL_NAME (decl))
	{
	  /* NVR doesn't preserve const qualification of the
	     variable's type.  Look the original named variable up in
	     the outermost block and use its type's constness
	     instead.  */
	  tree outer = outer_curly_brace_block (current_function_decl);
	  tree var;

	  if (outer)
	    for (var = BLOCK_VARS (outer); var; var = DECL_CHAIN (var))
	      if (VAR_P (var)
		  && DECL_NAME (decl) == DECL_NAME (var)
		  && (TYPE_MAIN_VARIANT (type)
		      == TYPE_MAIN_VARIANT (TREE_TYPE (var))))
		{
		  if (TYPE_READONLY (TREE_TYPE (var)))
		    type = TREE_TYPE (var);
		  break;
		}
	}
    }

  if (type == error_mark_node)
    return false;

  /* Variables with const-qualified type having no mutable member
     are predetermined shared.  */
  if (TYPE_READONLY (type) && !cp_has_mutable_p (type))
    return true;

  return false;
}

/* True if OpenMP sharing attribute of DECL is predetermined.  */

enum omp_clause_default_kind
cxx_omp_predetermined_sharing (tree decl)
{
  /* Static data members are predetermined shared.  */
  if (TREE_STATIC (decl))
    {
      tree ctx = CP_DECL_CONTEXT (decl);
      if (TYPE_P (ctx) && MAYBE_CLASS_TYPE_P (ctx))
	return OMP_CLAUSE_DEFAULT_SHARED;
    }

  /* Const qualified vars having no mutable member are predetermined
     shared.  */
  if (cxx_omp_const_qual_no_mutable (decl))
    return OMP_CLAUSE_DEFAULT_SHARED;

  return OMP_CLAUSE_DEFAULT_UNSPECIFIED;
}

/* Finalize an implicitly determined clause.  Only FIRSTPRIVATE
   clauses are adjusted; they may be demoted to SHARED when the type
   cannot be (or need not be) privatized.  */

void
cxx_omp_finish_clause (tree c, gimple_seq *)
{
  tree decl, inner_type;
  bool make_shared = false;

  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
    return;

  decl = OMP_CLAUSE_DECL (c);
  decl = require_complete_type (decl);
  inner_type = TREE_TYPE (decl);
  if (decl == error_mark_node)
    make_shared = true;
  else if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* We're interested in the base element, not arrays.  */
  while (TREE_CODE (inner_type) == ARRAY_TYPE)
    inner_type = TREE_TYPE (inner_type);

  /* Check for special function availability by building a call to one.
     Save the results, because later we won't be in the right context
     for making these queries.  */
  if (!make_shared
      && CLASS_TYPE_P (inner_type)
      && cxx_omp_create_clause_info (c, inner_type, false, true, false, true))
    make_shared = true;

  if (make_shared)
    {
      OMP_CLAUSE_CODE (c) = OMP_CLAUSE_SHARED;
      OMP_CLAUSE_SHARED_FIRSTPRIVATE (c) = 0;
      OMP_CLAUSE_SHARED_READONLY (c) = 0;
    }
}

/* Return true if DECL's DECL_VALUE_EXPR (if any) should be
   disregarded in OpenMP construct, because it is going to be
   remapped during OpenMP lowering.  SHARED is true if DECL
   is going to be shared, false if it is going to be privatized.  */

bool
cxx_omp_disregard_value_expr (tree decl, bool shared)
{
  /* Only artificial privatized-member proxies are remapped.  */
  return !shared
	 && VAR_P (decl)
	 && DECL_HAS_VALUE_EXPR_P (decl)
	 && DECL_ARTIFICIAL (decl)
	 && DECL_LANG_SPECIFIC (decl)
	 && DECL_OMP_PRIVATIZED_MEMBER (decl);
}

/* Fold expression X which is used as an rvalue if RVAL is true.
*/ 2034*38fd1498Szrj 2035*38fd1498Szrj static tree 2036*38fd1498Szrj cp_fold_maybe_rvalue (tree x, bool rval) 2037*38fd1498Szrj { 2038*38fd1498Szrj while (true) 2039*38fd1498Szrj { 2040*38fd1498Szrj x = cp_fold (x); 2041*38fd1498Szrj if (rval && DECL_P (x) 2042*38fd1498Szrj && TREE_CODE (TREE_TYPE (x)) != REFERENCE_TYPE) 2043*38fd1498Szrj { 2044*38fd1498Szrj tree v = decl_constant_value (x); 2045*38fd1498Szrj if (v != x && v != error_mark_node) 2046*38fd1498Szrj { 2047*38fd1498Szrj x = v; 2048*38fd1498Szrj continue; 2049*38fd1498Szrj } 2050*38fd1498Szrj } 2051*38fd1498Szrj break; 2052*38fd1498Szrj } 2053*38fd1498Szrj return x; 2054*38fd1498Szrj } 2055*38fd1498Szrj 2056*38fd1498Szrj /* Fold expression X which is used as an rvalue. */ 2057*38fd1498Szrj 2058*38fd1498Szrj static tree 2059*38fd1498Szrj cp_fold_rvalue (tree x) 2060*38fd1498Szrj { 2061*38fd1498Szrj return cp_fold_maybe_rvalue (x, true); 2062*38fd1498Szrj } 2063*38fd1498Szrj 2064*38fd1498Szrj /* Perform folding on expression X. */ 2065*38fd1498Szrj 2066*38fd1498Szrj tree 2067*38fd1498Szrj cp_fully_fold (tree x) 2068*38fd1498Szrj { 2069*38fd1498Szrj if (processing_template_decl) 2070*38fd1498Szrj return x; 2071*38fd1498Szrj /* FIXME cp_fold ought to be a superset of maybe_constant_value so we don't 2072*38fd1498Szrj have to call both. */ 2073*38fd1498Szrj if (cxx_dialect >= cxx11) 2074*38fd1498Szrj { 2075*38fd1498Szrj x = maybe_constant_value (x); 2076*38fd1498Szrj /* Sometimes we are given a CONSTRUCTOR but the call above wraps it into 2077*38fd1498Szrj a TARGET_EXPR; undo that here. 
*/ 2078*38fd1498Szrj if (TREE_CODE (x) == TARGET_EXPR) 2079*38fd1498Szrj x = TARGET_EXPR_INITIAL (x); 2080*38fd1498Szrj else if (TREE_CODE (x) == VIEW_CONVERT_EXPR 2081*38fd1498Szrj && TREE_CODE (TREE_OPERAND (x, 0)) == CONSTRUCTOR 2082*38fd1498Szrj && TREE_TYPE (TREE_OPERAND (x, 0)) == TREE_TYPE (x)) 2083*38fd1498Szrj x = TREE_OPERAND (x, 0); 2084*38fd1498Szrj } 2085*38fd1498Szrj return cp_fold_rvalue (x); 2086*38fd1498Szrj } 2087*38fd1498Szrj 2088*38fd1498Szrj /* c-common interface to cp_fold. If IN_INIT, this is in a static initializer 2089*38fd1498Szrj and certain changes are made to the folding done. Or should be (FIXME). We 2090*38fd1498Szrj never touch maybe_const, as it is only used for the C front-end 2091*38fd1498Szrj C_MAYBE_CONST_EXPR. */ 2092*38fd1498Szrj 2093*38fd1498Szrj tree 2094*38fd1498Szrj c_fully_fold (tree x, bool /*in_init*/, bool */*maybe_const*/, bool lval) 2095*38fd1498Szrj { 2096*38fd1498Szrj return cp_fold_maybe_rvalue (x, !lval); 2097*38fd1498Szrj } 2098*38fd1498Szrj 2099*38fd1498Szrj static GTY((deletable)) hash_map<tree, tree> *fold_cache; 2100*38fd1498Szrj 2101*38fd1498Szrj /* Dispose of the whole FOLD_CACHE. */ 2102*38fd1498Szrj 2103*38fd1498Szrj void 2104*38fd1498Szrj clear_fold_cache (void) 2105*38fd1498Szrj { 2106*38fd1498Szrj if (fold_cache != NULL) 2107*38fd1498Szrj fold_cache->empty (); 2108*38fd1498Szrj } 2109*38fd1498Szrj 2110*38fd1498Szrj /* This function tries to fold an expression X. 2111*38fd1498Szrj To avoid combinatorial explosion, folding results are kept in fold_cache. 2112*38fd1498Szrj If X is invalid, we don't fold at all. 2113*38fd1498Szrj For performance reasons we don't cache expressions representing a 2114*38fd1498Szrj declaration or constant. 2115*38fd1498Szrj Function returns X or its folded variant. 
*/ 2116*38fd1498Szrj 2117*38fd1498Szrj static tree 2118*38fd1498Szrj cp_fold (tree x) 2119*38fd1498Szrj { 2120*38fd1498Szrj tree op0, op1, op2, op3; 2121*38fd1498Szrj tree org_x = x, r = NULL_TREE; 2122*38fd1498Szrj enum tree_code code; 2123*38fd1498Szrj location_t loc; 2124*38fd1498Szrj bool rval_ops = true; 2125*38fd1498Szrj 2126*38fd1498Szrj if (!x || x == error_mark_node) 2127*38fd1498Szrj return x; 2128*38fd1498Szrj 2129*38fd1498Szrj if (EXPR_P (x) && (!TREE_TYPE (x) || TREE_TYPE (x) == error_mark_node)) 2130*38fd1498Szrj return x; 2131*38fd1498Szrj 2132*38fd1498Szrj /* Don't bother to cache DECLs or constants. */ 2133*38fd1498Szrj if (DECL_P (x) || CONSTANT_CLASS_P (x)) 2134*38fd1498Szrj return x; 2135*38fd1498Szrj 2136*38fd1498Szrj if (fold_cache == NULL) 2137*38fd1498Szrj fold_cache = hash_map<tree, tree>::create_ggc (101); 2138*38fd1498Szrj 2139*38fd1498Szrj if (tree *cached = fold_cache->get (x)) 2140*38fd1498Szrj return *cached; 2141*38fd1498Szrj 2142*38fd1498Szrj code = TREE_CODE (x); 2143*38fd1498Szrj switch (code) 2144*38fd1498Szrj { 2145*38fd1498Szrj case CLEANUP_POINT_EXPR: 2146*38fd1498Szrj /* Strip CLEANUP_POINT_EXPR if the expression doesn't have side 2147*38fd1498Szrj effects. */ 2148*38fd1498Szrj r = cp_fold_rvalue (TREE_OPERAND (x, 0)); 2149*38fd1498Szrj if (!TREE_SIDE_EFFECTS (r)) 2150*38fd1498Szrj x = r; 2151*38fd1498Szrj break; 2152*38fd1498Szrj 2153*38fd1498Szrj case SIZEOF_EXPR: 2154*38fd1498Szrj x = fold_sizeof_expr (x); 2155*38fd1498Szrj break; 2156*38fd1498Szrj 2157*38fd1498Szrj case VIEW_CONVERT_EXPR: 2158*38fd1498Szrj rval_ops = false; 2159*38fd1498Szrj /* FALLTHRU */ 2160*38fd1498Szrj case CONVERT_EXPR: 2161*38fd1498Szrj case NOP_EXPR: 2162*38fd1498Szrj case NON_LVALUE_EXPR: 2163*38fd1498Szrj 2164*38fd1498Szrj if (VOID_TYPE_P (TREE_TYPE (x))) 2165*38fd1498Szrj { 2166*38fd1498Szrj /* This is just to make sure we don't end up with casts to 2167*38fd1498Szrj void from error_mark_node. 
If we just return x, then 2168*38fd1498Szrj cp_fold_r might fold the operand into error_mark_node and 2169*38fd1498Szrj leave the conversion in the IR. STRIP_USELESS_TYPE_CONVERSION 2170*38fd1498Szrj during gimplification doesn't like such casts. 2171*38fd1498Szrj Don't create a new tree if op0 != TREE_OPERAND (x, 0), the 2172*38fd1498Szrj folding of the operand should be in the caches and if in cp_fold_r 2173*38fd1498Szrj it will modify it in place. */ 2174*38fd1498Szrj op0 = cp_fold (TREE_OPERAND (x, 0)); 2175*38fd1498Szrj if (op0 == error_mark_node) 2176*38fd1498Szrj x = error_mark_node; 2177*38fd1498Szrj break; 2178*38fd1498Szrj } 2179*38fd1498Szrj 2180*38fd1498Szrj loc = EXPR_LOCATION (x); 2181*38fd1498Szrj op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops); 2182*38fd1498Szrj 2183*38fd1498Szrj if (code == CONVERT_EXPR 2184*38fd1498Szrj && SCALAR_TYPE_P (TREE_TYPE (x)) 2185*38fd1498Szrj && op0 != void_node) 2186*38fd1498Szrj /* During parsing we used convert_to_*_nofold; re-convert now using the 2187*38fd1498Szrj folding variants, since fold() doesn't do those transformations. */ 2188*38fd1498Szrj x = fold (convert (TREE_TYPE (x), op0)); 2189*38fd1498Szrj else if (op0 != TREE_OPERAND (x, 0)) 2190*38fd1498Szrj { 2191*38fd1498Szrj if (op0 == error_mark_node) 2192*38fd1498Szrj x = error_mark_node; 2193*38fd1498Szrj else 2194*38fd1498Szrj x = fold_build1_loc (loc, code, TREE_TYPE (x), op0); 2195*38fd1498Szrj } 2196*38fd1498Szrj else 2197*38fd1498Szrj x = fold (x); 2198*38fd1498Szrj 2199*38fd1498Szrj /* Conversion of an out-of-range value has implementation-defined 2200*38fd1498Szrj behavior; the language considers it different from arithmetic 2201*38fd1498Szrj overflow, which is undefined. 
*/ 2202*38fd1498Szrj if (TREE_CODE (op0) == INTEGER_CST 2203*38fd1498Szrj && TREE_OVERFLOW_P (x) && !TREE_OVERFLOW_P (op0)) 2204*38fd1498Szrj TREE_OVERFLOW (x) = false; 2205*38fd1498Szrj 2206*38fd1498Szrj break; 2207*38fd1498Szrj 2208*38fd1498Szrj case INDIRECT_REF: 2209*38fd1498Szrj /* We don't need the decltype(auto) obfuscation anymore. */ 2210*38fd1498Szrj if (REF_PARENTHESIZED_P (x)) 2211*38fd1498Szrj { 2212*38fd1498Szrj tree p = maybe_undo_parenthesized_ref (x); 2213*38fd1498Szrj return cp_fold (p); 2214*38fd1498Szrj } 2215*38fd1498Szrj goto unary; 2216*38fd1498Szrj 2217*38fd1498Szrj case ADDR_EXPR: 2218*38fd1498Szrj loc = EXPR_LOCATION (x); 2219*38fd1498Szrj op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), false); 2220*38fd1498Szrj 2221*38fd1498Szrj /* Cope with user tricks that amount to offsetof. */ 2222*38fd1498Szrj if (op0 != error_mark_node 2223*38fd1498Szrj && TREE_CODE (TREE_TYPE (op0)) != FUNCTION_TYPE 2224*38fd1498Szrj && TREE_CODE (TREE_TYPE (op0)) != METHOD_TYPE) 2225*38fd1498Szrj { 2226*38fd1498Szrj tree val = get_base_address (op0); 2227*38fd1498Szrj if (val 2228*38fd1498Szrj && INDIRECT_REF_P (val) 2229*38fd1498Szrj && COMPLETE_TYPE_P (TREE_TYPE (val)) 2230*38fd1498Szrj && TREE_CONSTANT (TREE_OPERAND (val, 0))) 2231*38fd1498Szrj { 2232*38fd1498Szrj val = TREE_OPERAND (val, 0); 2233*38fd1498Szrj STRIP_NOPS (val); 2234*38fd1498Szrj if (TREE_CODE (val) == INTEGER_CST) 2235*38fd1498Szrj return fold_convert (TREE_TYPE (x), fold_offsetof_1 (op0)); 2236*38fd1498Szrj } 2237*38fd1498Szrj } 2238*38fd1498Szrj goto finish_unary; 2239*38fd1498Szrj 2240*38fd1498Szrj case REALPART_EXPR: 2241*38fd1498Szrj case IMAGPART_EXPR: 2242*38fd1498Szrj rval_ops = false; 2243*38fd1498Szrj /* FALLTHRU */ 2244*38fd1498Szrj case CONJ_EXPR: 2245*38fd1498Szrj case FIX_TRUNC_EXPR: 2246*38fd1498Szrj case FLOAT_EXPR: 2247*38fd1498Szrj case NEGATE_EXPR: 2248*38fd1498Szrj case ABS_EXPR: 2249*38fd1498Szrj case BIT_NOT_EXPR: 2250*38fd1498Szrj case TRUTH_NOT_EXPR: 2251*38fd1498Szrj 
case FIXED_CONVERT_EXPR: 2252*38fd1498Szrj unary: 2253*38fd1498Szrj 2254*38fd1498Szrj loc = EXPR_LOCATION (x); 2255*38fd1498Szrj op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops); 2256*38fd1498Szrj 2257*38fd1498Szrj finish_unary: 2258*38fd1498Szrj if (op0 != TREE_OPERAND (x, 0)) 2259*38fd1498Szrj { 2260*38fd1498Szrj if (op0 == error_mark_node) 2261*38fd1498Szrj x = error_mark_node; 2262*38fd1498Szrj else 2263*38fd1498Szrj { 2264*38fd1498Szrj x = fold_build1_loc (loc, code, TREE_TYPE (x), op0); 2265*38fd1498Szrj if (code == INDIRECT_REF 2266*38fd1498Szrj && (INDIRECT_REF_P (x) || TREE_CODE (x) == MEM_REF)) 2267*38fd1498Szrj { 2268*38fd1498Szrj TREE_READONLY (x) = TREE_READONLY (org_x); 2269*38fd1498Szrj TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x); 2270*38fd1498Szrj TREE_THIS_VOLATILE (x) = TREE_THIS_VOLATILE (org_x); 2271*38fd1498Szrj } 2272*38fd1498Szrj } 2273*38fd1498Szrj } 2274*38fd1498Szrj else 2275*38fd1498Szrj x = fold (x); 2276*38fd1498Szrj 2277*38fd1498Szrj gcc_assert (TREE_CODE (x) != COND_EXPR 2278*38fd1498Szrj || !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (x, 0)))); 2279*38fd1498Szrj break; 2280*38fd1498Szrj 2281*38fd1498Szrj case UNARY_PLUS_EXPR: 2282*38fd1498Szrj op0 = cp_fold_rvalue (TREE_OPERAND (x, 0)); 2283*38fd1498Szrj if (op0 == error_mark_node) 2284*38fd1498Szrj x = error_mark_node; 2285*38fd1498Szrj else 2286*38fd1498Szrj x = fold_convert (TREE_TYPE (x), op0); 2287*38fd1498Szrj break; 2288*38fd1498Szrj 2289*38fd1498Szrj case POSTDECREMENT_EXPR: 2290*38fd1498Szrj case POSTINCREMENT_EXPR: 2291*38fd1498Szrj case INIT_EXPR: 2292*38fd1498Szrj case PREDECREMENT_EXPR: 2293*38fd1498Szrj case PREINCREMENT_EXPR: 2294*38fd1498Szrj case COMPOUND_EXPR: 2295*38fd1498Szrj case MODIFY_EXPR: 2296*38fd1498Szrj rval_ops = false; 2297*38fd1498Szrj /* FALLTHRU */ 2298*38fd1498Szrj case POINTER_PLUS_EXPR: 2299*38fd1498Szrj case PLUS_EXPR: 2300*38fd1498Szrj case POINTER_DIFF_EXPR: 2301*38fd1498Szrj case MINUS_EXPR: 2302*38fd1498Szrj case MULT_EXPR: 
2303*38fd1498Szrj case TRUNC_DIV_EXPR: 2304*38fd1498Szrj case CEIL_DIV_EXPR: 2305*38fd1498Szrj case FLOOR_DIV_EXPR: 2306*38fd1498Szrj case ROUND_DIV_EXPR: 2307*38fd1498Szrj case TRUNC_MOD_EXPR: 2308*38fd1498Szrj case CEIL_MOD_EXPR: 2309*38fd1498Szrj case ROUND_MOD_EXPR: 2310*38fd1498Szrj case RDIV_EXPR: 2311*38fd1498Szrj case EXACT_DIV_EXPR: 2312*38fd1498Szrj case MIN_EXPR: 2313*38fd1498Szrj case MAX_EXPR: 2314*38fd1498Szrj case LSHIFT_EXPR: 2315*38fd1498Szrj case RSHIFT_EXPR: 2316*38fd1498Szrj case LROTATE_EXPR: 2317*38fd1498Szrj case RROTATE_EXPR: 2318*38fd1498Szrj case BIT_AND_EXPR: 2319*38fd1498Szrj case BIT_IOR_EXPR: 2320*38fd1498Szrj case BIT_XOR_EXPR: 2321*38fd1498Szrj case TRUTH_AND_EXPR: 2322*38fd1498Szrj case TRUTH_ANDIF_EXPR: 2323*38fd1498Szrj case TRUTH_OR_EXPR: 2324*38fd1498Szrj case TRUTH_ORIF_EXPR: 2325*38fd1498Szrj case TRUTH_XOR_EXPR: 2326*38fd1498Szrj case LT_EXPR: case LE_EXPR: 2327*38fd1498Szrj case GT_EXPR: case GE_EXPR: 2328*38fd1498Szrj case EQ_EXPR: case NE_EXPR: 2329*38fd1498Szrj case UNORDERED_EXPR: case ORDERED_EXPR: 2330*38fd1498Szrj case UNLT_EXPR: case UNLE_EXPR: 2331*38fd1498Szrj case UNGT_EXPR: case UNGE_EXPR: 2332*38fd1498Szrj case UNEQ_EXPR: case LTGT_EXPR: 2333*38fd1498Szrj case RANGE_EXPR: case COMPLEX_EXPR: 2334*38fd1498Szrj 2335*38fd1498Szrj loc = EXPR_LOCATION (x); 2336*38fd1498Szrj op0 = cp_fold_maybe_rvalue (TREE_OPERAND (x, 0), rval_ops); 2337*38fd1498Szrj op1 = cp_fold_rvalue (TREE_OPERAND (x, 1)); 2338*38fd1498Szrj 2339*38fd1498Szrj if (op0 != TREE_OPERAND (x, 0) || op1 != TREE_OPERAND (x, 1)) 2340*38fd1498Szrj { 2341*38fd1498Szrj if (op0 == error_mark_node || op1 == error_mark_node) 2342*38fd1498Szrj x = error_mark_node; 2343*38fd1498Szrj else 2344*38fd1498Szrj x = fold_build2_loc (loc, code, TREE_TYPE (x), op0, op1); 2345*38fd1498Szrj } 2346*38fd1498Szrj else 2347*38fd1498Szrj x = fold (x); 2348*38fd1498Szrj 2349*38fd1498Szrj if (TREE_NO_WARNING (org_x) 2350*38fd1498Szrj && warn_nonnull_compare 2351*38fd1498Szrj && 
COMPARISON_CLASS_P (org_x)) 2352*38fd1498Szrj { 2353*38fd1498Szrj if (x == error_mark_node || TREE_CODE (x) == INTEGER_CST) 2354*38fd1498Szrj ; 2355*38fd1498Szrj else if (COMPARISON_CLASS_P (x)) 2356*38fd1498Szrj TREE_NO_WARNING (x) = 1; 2357*38fd1498Szrj /* Otherwise give up on optimizing these, let GIMPLE folders 2358*38fd1498Szrj optimize those later on. */ 2359*38fd1498Szrj else if (op0 != TREE_OPERAND (org_x, 0) 2360*38fd1498Szrj || op1 != TREE_OPERAND (org_x, 1)) 2361*38fd1498Szrj { 2362*38fd1498Szrj x = build2_loc (loc, code, TREE_TYPE (org_x), op0, op1); 2363*38fd1498Szrj TREE_NO_WARNING (x) = 1; 2364*38fd1498Szrj } 2365*38fd1498Szrj else 2366*38fd1498Szrj x = org_x; 2367*38fd1498Szrj } 2368*38fd1498Szrj break; 2369*38fd1498Szrj 2370*38fd1498Szrj case VEC_COND_EXPR: 2371*38fd1498Szrj case COND_EXPR: 2372*38fd1498Szrj loc = EXPR_LOCATION (x); 2373*38fd1498Szrj op0 = cp_fold_rvalue (TREE_OPERAND (x, 0)); 2374*38fd1498Szrj op1 = cp_fold (TREE_OPERAND (x, 1)); 2375*38fd1498Szrj op2 = cp_fold (TREE_OPERAND (x, 2)); 2376*38fd1498Szrj 2377*38fd1498Szrj if (TREE_CODE (TREE_TYPE (x)) == BOOLEAN_TYPE) 2378*38fd1498Szrj { 2379*38fd1498Szrj warning_sentinel s (warn_int_in_bool_context); 2380*38fd1498Szrj if (!VOID_TYPE_P (TREE_TYPE (op1))) 2381*38fd1498Szrj op1 = cp_truthvalue_conversion (op1); 2382*38fd1498Szrj if (!VOID_TYPE_P (TREE_TYPE (op2))) 2383*38fd1498Szrj op2 = cp_truthvalue_conversion (op2); 2384*38fd1498Szrj } 2385*38fd1498Szrj else if (VOID_TYPE_P (TREE_TYPE (x))) 2386*38fd1498Szrj { 2387*38fd1498Szrj if (TREE_CODE (op0) == INTEGER_CST) 2388*38fd1498Szrj { 2389*38fd1498Szrj /* If the condition is constant, fold can fold away 2390*38fd1498Szrj the COND_EXPR. If some statement-level uses of COND_EXPR 2391*38fd1498Szrj have one of the branches NULL, avoid folding crash. 
*/ 2392*38fd1498Szrj if (!op1) 2393*38fd1498Szrj op1 = build_empty_stmt (loc); 2394*38fd1498Szrj if (!op2) 2395*38fd1498Szrj op2 = build_empty_stmt (loc); 2396*38fd1498Szrj } 2397*38fd1498Szrj else 2398*38fd1498Szrj { 2399*38fd1498Szrj /* Otherwise, don't bother folding a void condition, since 2400*38fd1498Szrj it can't produce a constant value. */ 2401*38fd1498Szrj if (op0 != TREE_OPERAND (x, 0) 2402*38fd1498Szrj || op1 != TREE_OPERAND (x, 1) 2403*38fd1498Szrj || op2 != TREE_OPERAND (x, 2)) 2404*38fd1498Szrj x = build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2); 2405*38fd1498Szrj break; 2406*38fd1498Szrj } 2407*38fd1498Szrj } 2408*38fd1498Szrj 2409*38fd1498Szrj if (op0 != TREE_OPERAND (x, 0) 2410*38fd1498Szrj || op1 != TREE_OPERAND (x, 1) 2411*38fd1498Szrj || op2 != TREE_OPERAND (x, 2)) 2412*38fd1498Szrj { 2413*38fd1498Szrj if (op0 == error_mark_node 2414*38fd1498Szrj || op1 == error_mark_node 2415*38fd1498Szrj || op2 == error_mark_node) 2416*38fd1498Szrj x = error_mark_node; 2417*38fd1498Szrj else 2418*38fd1498Szrj x = fold_build3_loc (loc, code, TREE_TYPE (x), op0, op1, op2); 2419*38fd1498Szrj } 2420*38fd1498Szrj else 2421*38fd1498Szrj x = fold (x); 2422*38fd1498Szrj 2423*38fd1498Szrj /* A COND_EXPR might have incompatible types in branches if one or both 2424*38fd1498Szrj arms are bitfields. If folding exposed such a branch, fix it up. */ 2425*38fd1498Szrj if (TREE_CODE (x) != code 2426*38fd1498Szrj && x != error_mark_node 2427*38fd1498Szrj && !useless_type_conversion_p (TREE_TYPE (org_x), TREE_TYPE (x))) 2428*38fd1498Szrj x = fold_convert (TREE_TYPE (org_x), x); 2429*38fd1498Szrj 2430*38fd1498Szrj break; 2431*38fd1498Szrj 2432*38fd1498Szrj case CALL_EXPR: 2433*38fd1498Szrj { 2434*38fd1498Szrj int i, m, sv = optimize, nw = sv, changed = 0; 2435*38fd1498Szrj tree callee = get_callee_fndecl (x); 2436*38fd1498Szrj 2437*38fd1498Szrj /* Some built-in function calls will be evaluated at compile-time in 2438*38fd1498Szrj fold (). 
Set optimize to 1 when folding __builtin_constant_p inside 2439*38fd1498Szrj a constexpr function so that fold_builtin_1 doesn't fold it to 0. */ 2440*38fd1498Szrj if (callee && DECL_BUILT_IN (callee) && !optimize 2441*38fd1498Szrj && DECL_IS_BUILTIN_CONSTANT_P (callee) 2442*38fd1498Szrj && current_function_decl 2443*38fd1498Szrj && DECL_DECLARED_CONSTEXPR_P (current_function_decl)) 2444*38fd1498Szrj nw = 1; 2445*38fd1498Szrj 2446*38fd1498Szrj x = copy_node (x); 2447*38fd1498Szrj 2448*38fd1498Szrj m = call_expr_nargs (x); 2449*38fd1498Szrj for (i = 0; i < m; i++) 2450*38fd1498Szrj { 2451*38fd1498Szrj r = cp_fold (CALL_EXPR_ARG (x, i)); 2452*38fd1498Szrj if (r != CALL_EXPR_ARG (x, i)) 2453*38fd1498Szrj { 2454*38fd1498Szrj if (r == error_mark_node) 2455*38fd1498Szrj { 2456*38fd1498Szrj x = error_mark_node; 2457*38fd1498Szrj break; 2458*38fd1498Szrj } 2459*38fd1498Szrj changed = 1; 2460*38fd1498Szrj } 2461*38fd1498Szrj CALL_EXPR_ARG (x, i) = r; 2462*38fd1498Szrj } 2463*38fd1498Szrj if (x == error_mark_node) 2464*38fd1498Szrj break; 2465*38fd1498Szrj 2466*38fd1498Szrj optimize = nw; 2467*38fd1498Szrj r = fold (x); 2468*38fd1498Szrj optimize = sv; 2469*38fd1498Szrj 2470*38fd1498Szrj if (TREE_CODE (r) != CALL_EXPR) 2471*38fd1498Szrj { 2472*38fd1498Szrj x = cp_fold (r); 2473*38fd1498Szrj break; 2474*38fd1498Szrj } 2475*38fd1498Szrj 2476*38fd1498Szrj optimize = nw; 2477*38fd1498Szrj 2478*38fd1498Szrj /* Invoke maybe_constant_value for functions declared 2479*38fd1498Szrj constexpr and not called with AGGR_INIT_EXPRs. 2480*38fd1498Szrj TODO: 2481*38fd1498Szrj Do constexpr expansion of expressions where the call itself is not 2482*38fd1498Szrj constant, but the call followed by an INDIRECT_REF is. 
*/ 2483*38fd1498Szrj if (callee && DECL_DECLARED_CONSTEXPR_P (callee) 2484*38fd1498Szrj && !flag_no_inline) 2485*38fd1498Szrj r = maybe_constant_value (x); 2486*38fd1498Szrj optimize = sv; 2487*38fd1498Szrj 2488*38fd1498Szrj if (TREE_CODE (r) != CALL_EXPR) 2489*38fd1498Szrj { 2490*38fd1498Szrj if (DECL_CONSTRUCTOR_P (callee)) 2491*38fd1498Szrj { 2492*38fd1498Szrj loc = EXPR_LOCATION (x); 2493*38fd1498Szrj tree s = build_fold_indirect_ref_loc (loc, 2494*38fd1498Szrj CALL_EXPR_ARG (x, 0)); 2495*38fd1498Szrj r = build2_loc (loc, INIT_EXPR, TREE_TYPE (s), s, r); 2496*38fd1498Szrj } 2497*38fd1498Szrj x = r; 2498*38fd1498Szrj break; 2499*38fd1498Szrj } 2500*38fd1498Szrj 2501*38fd1498Szrj if (!changed) 2502*38fd1498Szrj x = org_x; 2503*38fd1498Szrj break; 2504*38fd1498Szrj } 2505*38fd1498Szrj 2506*38fd1498Szrj case CONSTRUCTOR: 2507*38fd1498Szrj { 2508*38fd1498Szrj unsigned i; 2509*38fd1498Szrj constructor_elt *p; 2510*38fd1498Szrj vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (x); 2511*38fd1498Szrj vec<constructor_elt, va_gc> *nelts = NULL; 2512*38fd1498Szrj FOR_EACH_VEC_SAFE_ELT (elts, i, p) 2513*38fd1498Szrj { 2514*38fd1498Szrj tree op = cp_fold (p->value); 2515*38fd1498Szrj if (op != p->value) 2516*38fd1498Szrj { 2517*38fd1498Szrj if (op == error_mark_node) 2518*38fd1498Szrj { 2519*38fd1498Szrj x = error_mark_node; 2520*38fd1498Szrj vec_free (nelts); 2521*38fd1498Szrj break; 2522*38fd1498Szrj } 2523*38fd1498Szrj if (nelts == NULL) 2524*38fd1498Szrj nelts = elts->copy (); 2525*38fd1498Szrj (*nelts)[i].value = op; 2526*38fd1498Szrj } 2527*38fd1498Szrj } 2528*38fd1498Szrj if (nelts) 2529*38fd1498Szrj { 2530*38fd1498Szrj x = build_constructor (TREE_TYPE (x), nelts); 2531*38fd1498Szrj CONSTRUCTOR_PLACEHOLDER_BOUNDARY (x) 2532*38fd1498Szrj = CONSTRUCTOR_PLACEHOLDER_BOUNDARY (org_x); 2533*38fd1498Szrj } 2534*38fd1498Szrj if (VECTOR_TYPE_P (TREE_TYPE (x))) 2535*38fd1498Szrj x = fold (x); 2536*38fd1498Szrj break; 2537*38fd1498Szrj } 2538*38fd1498Szrj case TREE_VEC: 
2539*38fd1498Szrj { 2540*38fd1498Szrj bool changed = false; 2541*38fd1498Szrj vec<tree, va_gc> *vec = make_tree_vector (); 2542*38fd1498Szrj int i, n = TREE_VEC_LENGTH (x); 2543*38fd1498Szrj vec_safe_reserve (vec, n); 2544*38fd1498Szrj 2545*38fd1498Szrj for (i = 0; i < n; i++) 2546*38fd1498Szrj { 2547*38fd1498Szrj tree op = cp_fold (TREE_VEC_ELT (x, i)); 2548*38fd1498Szrj vec->quick_push (op); 2549*38fd1498Szrj if (op != TREE_VEC_ELT (x, i)) 2550*38fd1498Szrj changed = true; 2551*38fd1498Szrj } 2552*38fd1498Szrj 2553*38fd1498Szrj if (changed) 2554*38fd1498Szrj { 2555*38fd1498Szrj r = copy_node (x); 2556*38fd1498Szrj for (i = 0; i < n; i++) 2557*38fd1498Szrj TREE_VEC_ELT (r, i) = (*vec)[i]; 2558*38fd1498Szrj x = r; 2559*38fd1498Szrj } 2560*38fd1498Szrj 2561*38fd1498Szrj release_tree_vector (vec); 2562*38fd1498Szrj } 2563*38fd1498Szrj 2564*38fd1498Szrj break; 2565*38fd1498Szrj 2566*38fd1498Szrj case ARRAY_REF: 2567*38fd1498Szrj case ARRAY_RANGE_REF: 2568*38fd1498Szrj 2569*38fd1498Szrj loc = EXPR_LOCATION (x); 2570*38fd1498Szrj op0 = cp_fold (TREE_OPERAND (x, 0)); 2571*38fd1498Szrj op1 = cp_fold (TREE_OPERAND (x, 1)); 2572*38fd1498Szrj op2 = cp_fold (TREE_OPERAND (x, 2)); 2573*38fd1498Szrj op3 = cp_fold (TREE_OPERAND (x, 3)); 2574*38fd1498Szrj 2575*38fd1498Szrj if (op0 != TREE_OPERAND (x, 0) 2576*38fd1498Szrj || op1 != TREE_OPERAND (x, 1) 2577*38fd1498Szrj || op2 != TREE_OPERAND (x, 2) 2578*38fd1498Szrj || op3 != TREE_OPERAND (x, 3)) 2579*38fd1498Szrj { 2580*38fd1498Szrj if (op0 == error_mark_node 2581*38fd1498Szrj || op1 == error_mark_node 2582*38fd1498Szrj || op2 == error_mark_node 2583*38fd1498Szrj || op3 == error_mark_node) 2584*38fd1498Szrj x = error_mark_node; 2585*38fd1498Szrj else 2586*38fd1498Szrj { 2587*38fd1498Szrj x = build4_loc (loc, code, TREE_TYPE (x), op0, op1, op2, op3); 2588*38fd1498Szrj TREE_READONLY (x) = TREE_READONLY (org_x); 2589*38fd1498Szrj TREE_SIDE_EFFECTS (x) = TREE_SIDE_EFFECTS (org_x); 2590*38fd1498Szrj TREE_THIS_VOLATILE (x) = 
TREE_THIS_VOLATILE (org_x); 2591*38fd1498Szrj } 2592*38fd1498Szrj } 2593*38fd1498Szrj 2594*38fd1498Szrj x = fold (x); 2595*38fd1498Szrj break; 2596*38fd1498Szrj 2597*38fd1498Szrj case SAVE_EXPR: 2598*38fd1498Szrj /* A SAVE_EXPR might contain e.g. (0 * i) + (0 * j), which, after 2599*38fd1498Szrj folding, evaluates to an invariant. In that case no need to wrap 2600*38fd1498Szrj this folded tree with a SAVE_EXPR. */ 2601*38fd1498Szrj r = cp_fold (TREE_OPERAND (x, 0)); 2602*38fd1498Szrj if (tree_invariant_p (r)) 2603*38fd1498Szrj x = r; 2604*38fd1498Szrj break; 2605*38fd1498Szrj 2606*38fd1498Szrj default: 2607*38fd1498Szrj return org_x; 2608*38fd1498Szrj } 2609*38fd1498Szrj 2610*38fd1498Szrj fold_cache->put (org_x, x); 2611*38fd1498Szrj /* Prevent that we try to fold an already folded result again. */ 2612*38fd1498Szrj if (x != org_x) 2613*38fd1498Szrj fold_cache->put (x, x); 2614*38fd1498Szrj 2615*38fd1498Szrj return x; 2616*38fd1498Szrj } 2617*38fd1498Szrj 2618*38fd1498Szrj #include "gt-cp-cp-gimplify.h" 2619