/* Miscellaneous SSA utility functions.
   Copyright (C) 2001-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "tree-ssa-loop-manip.h"
#include "tree-into-ssa.h"
#include "tree-ssa.h"
#include "cfgloop.h"
#include "cfgexpand.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"

/* Pointer map of variable mappings, keyed by edge.  */
static hash_map<edge, auto_vec<edge_var_map> > *edge_var_maps;


/* Add a mapping with PHI RESULT and PHI DEF associated with edge E.  */

void
redirect_edge_var_map_add (edge e, tree result, tree def, source_location locus)
{
  edge_var_map new_node;

  if (edge_var_maps == NULL)
    edge_var_maps = new hash_map<edge, auto_vec<edge_var_map> >;

  auto_vec<edge_var_map> &slot = edge_var_maps->get_or_insert (e);
  new_node.def = def;
  new_node.result = result;
  new_node.locus = locus;

  slot.safe_push (new_node);
}


/* Clear the var mappings in edge E.  */

void
redirect_edge_var_map_clear (edge e)
{
  if (!edge_var_maps)
    return;

  auto_vec<edge_var_map> *head = edge_var_maps->get (e);

  if (head)
    head->release ();
}


/* Duplicate the redirected var mappings in OLDE in NEWE.

   This assumes a hash_map can have multiple edges mapping to the same
   var_map (many to one mapping), since we don't remove the previous mappings.
   */

void
redirect_edge_var_map_dup (edge newe, edge olde)
{
  if (!edge_var_maps)
    return;

  auto_vec<edge_var_map> *new_head = &edge_var_maps->get_or_insert (newe);
  auto_vec<edge_var_map> *old_head = edge_var_maps->get (olde);
  if (!old_head)
    return;

  new_head->safe_splice (*old_head);
}


/* Return the variable mappings for a given edge.  If there is none, return
   NULL.  */

vec<edge_var_map> *
redirect_edge_var_map_vector (edge e)
{
  /* Hey, what kind of idiot would... you'd be surprised.  */
  if (!edge_var_maps)
    return NULL;

  auto_vec<edge_var_map> *slot = edge_var_maps->get (e);
  if (!slot)
    return NULL;

  return slot;
}

/* Clear the edge variable mappings.  */

void
redirect_edge_var_map_empty (void)
{
  if (edge_var_maps)
    edge_var_maps->empty ();
}


/* Remove the corresponding arguments from the PHI nodes in E's
   destination block and redirect it to DEST.  Return redirected edge.
   The list of removed arguments is stored in a vector accessed
   through edge_var_maps.  */

edge
ssa_redirect_edge (edge e, basic_block dest)
{
  gphi_iterator gsi;
  gphi *phi;

  redirect_edge_var_map_clear (e);

  /* Remove the appropriate PHI arguments in E's destination block.
     If we are redirecting a copied edge the destination has not
     got PHI argument space reserved nor an interesting argument.  */
  if (! (e->dest->flags & BB_DUPLICATED))
    for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
      {
        tree def;
        source_location locus;

        phi = gsi.phi ();
        def = gimple_phi_arg_def (phi, e->dest_idx);
        locus = gimple_phi_arg_location (phi, e->dest_idx);

        if (def == NULL_TREE)
          continue;

        redirect_edge_var_map_add (e, gimple_phi_result (phi), def, locus);
      }

  e = redirect_edge_succ_nodup (e, dest);

  return e;
}


/* Add PHI arguments queued in PENDING_STMT list on edge E to edge
   E->dest.  */
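
/* A usage sketch (illustrative, not a definition of the interface):
   callers typically redirect an edge and then flush the queued
   arguments, e.g.

     e = ssa_redirect_edge (e, new_dest);
     ...
     flush_pending_stmts (e);

   ssa_redirect_edge parks the PHI arguments it removed from the old
   destination in the edge var map; flush_pending_stmts below re-adds
   them to the PHI nodes of the new destination and clears the map
   entry for E.  NEW_DEST is only an illustrative name.  */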

void
flush_pending_stmts (edge e)
{
  gphi *phi;
  edge_var_map *vm;
  int i;
  gphi_iterator gsi;

  vec<edge_var_map> *v = redirect_edge_var_map_vector (e);
  if (!v)
    return;

  for (gsi = gsi_start_phis (e->dest), i = 0;
       !gsi_end_p (gsi) && v->iterate (i, &vm);
       gsi_next (&gsi), i++)
    {
      tree def;

      phi = gsi.phi ();
      def = redirect_edge_var_map_def (vm);
      add_phi_arg (phi, def, e, redirect_edge_var_map_location (vm));
    }

  redirect_edge_var_map_clear (e);
}

/* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
   GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
   expression with a different value.

   This will update any annotations (say debug bind stmts) referring
   to the original LHS, so that they use the RHS instead.  This is
   done even if NLHS and LHS are the same, for it is understood that
   the RHS will be modified afterwards, and NLHS will not be assigned
   an equivalent value.

   Adjusting any non-annotation uses of the LHS, if needed, is a
   responsibility of the caller.

   The effect of this call should be pretty much the same as that of
   inserting a copy of STMT before STMT, and then removing the
   original stmt, at which time gsi_remove() would have updated
   annotations, but using this function saves all the inserting,
   copying and removing.  */

void
gimple_replace_ssa_lhs (gimple *stmt, tree nlhs)
{
  if (MAY_HAVE_DEBUG_BIND_STMTS)
    {
      tree lhs = gimple_get_lhs (stmt);

      gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);

      insert_debug_temp_for_var_def (NULL, lhs);
    }

  gimple_set_lhs (stmt, nlhs);
}


/* Given a tree for an expression for which we might want to emit
   locations or values in debug information (generally a variable, but
   we might deal with other kinds of trees in the future), return the
   tree that should be used as the variable of a DEBUG_BIND STMT or
   VAR_LOCATION INSN or NOTE.  Return NULL if VAR is not to be tracked.  */
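
/* For example (illustrative): for an SSA name i_3 whose SSA_NAME_VAR is
   a user-visible PARM_DECL 'i', the PARM_DECL itself is returned, while
   DECL_IGNORED_P temporaries and variables of non-register type yield
   NULL_TREE.  */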

tree
target_for_debug_bind (tree var)
{
  if (!MAY_HAVE_DEBUG_BIND_STMTS)
    return NULL_TREE;

  if (TREE_CODE (var) == SSA_NAME)
    {
      var = SSA_NAME_VAR (var);
      if (var == NULL_TREE)
        return NULL_TREE;
    }

  if ((!VAR_P (var) || VAR_DECL_IS_VIRTUAL_OPERAND (var))
      && TREE_CODE (var) != PARM_DECL)
    return NULL_TREE;

  if (DECL_HAS_VALUE_EXPR_P (var))
    return target_for_debug_bind (DECL_VALUE_EXPR (var));

  if (DECL_IGNORED_P (var))
    return NULL_TREE;

  /* var-tracking only tracks registers.  */
  if (!is_gimple_reg_type (TREE_TYPE (var)))
    return NULL_TREE;

  return var;
}

/* Called via walk_tree, look for SSA_NAMEs that have already been
   released.  */

static tree
find_released_ssa_name (tree *tp, int *walk_subtrees, void *data_)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data_;

  if (wi && wi->is_lhs)
    return NULL_TREE;

  if (TREE_CODE (*tp) == SSA_NAME)
    {
      if (SSA_NAME_IN_FREE_LIST (*tp))
        return *tp;

      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;

  return NULL_TREE;
}

/* Insert a DEBUG BIND stmt before the DEF of VAR if VAR is referenced
   by other DEBUG stmts, and replace uses of the DEF with the
   newly-created debug temp.  */

void
insert_debug_temp_for_var_def (gimple_stmt_iterator *gsi, tree var)
{
  imm_use_iterator imm_iter;
  use_operand_p use_p;
  gimple *stmt;
  gimple *def_stmt = NULL;
  int usecount = 0;
  tree value = NULL;

  if (!MAY_HAVE_DEBUG_BIND_STMTS)
    return;

  /* If this name has already been registered for replacement, do nothing
     as anything that uses this name isn't in SSA form.  */
  if (name_registered_for_update_p (var))
    return;

  /* Check whether there are debug stmts that reference this variable and,
     if there are, decide whether we should use a debug temp.  */
  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
    {
      stmt = USE_STMT (use_p);

      if (!gimple_debug_bind_p (stmt))
        continue;

      if (usecount++)
        break;

      if (gimple_debug_bind_get_value (stmt) != var)
        {
          /* Count this as an additional use, so as to make sure we
             use a temp unless VAR's definition has a SINGLE_RHS that
             can be shared.  */
          usecount++;
          break;
        }
    }

  if (!usecount)
    return;

  if (gsi)
    def_stmt = gsi_stmt (*gsi);
  else
    def_stmt = SSA_NAME_DEF_STMT (var);

  /* If we didn't get an insertion point, and the stmt has already
     been removed, we won't be able to insert the debug bind stmt, so
     we'll have to drop debug information.  */
  if (gimple_code (def_stmt) == GIMPLE_PHI)
    {
      value = degenerate_phi_result (as_a <gphi *> (def_stmt));
      if (value && walk_tree (&value, find_released_ssa_name, NULL, NULL))
        value = NULL;
      /* error_mark_node is what fixup_noreturn_call changes PHI arguments
         to.  */
      else if (value == error_mark_node)
        value = NULL;
    }
  else if (is_gimple_assign (def_stmt))
    {
      bool no_value = false;

      if (!dom_info_available_p (CDI_DOMINATORS))
        {
          struct walk_stmt_info wi;

          memset (&wi, 0, sizeof (wi));

          /* When removing blocks without following reverse dominance
             order, we may sometimes encounter SSA_NAMEs that have
             already been released, referenced in other SSA_DEFs that
             we're about to release.  Consider:

             <bb X>:
             v_1 = foo;

             <bb Y>:
             w_2 = v_1 + bar;
             # DEBUG w => w_2

             If we deleted BB X first, propagating the value of w_2
             won't do us any good.  It's too late to recover the
             original definition of v_1: when it was deleted, it was
             only referenced in other DEFs, it couldn't possibly know
             it should have been retained, and propagating every
             single DEF just in case it might have to be propagated
             into a DEBUG STMT would probably be too wasteful.

             When dominator information is not readily available, we
             check for and accept some loss of debug information.  But
             if it is available, there's no excuse for us to remove
             blocks in the wrong order, so we don't even check for
             dead SSA NAMEs.  SSA verification shall catch any
             errors.  */
          if ((!gsi && !gimple_bb (def_stmt))
              || walk_gimple_op (def_stmt, find_released_ssa_name, &wi))
            no_value = true;
        }

      if (!no_value)
        value = gimple_assign_rhs_to_tree (def_stmt);
    }

  if (value)
    {
      /* If there's a single use of VAR, and VAR is the entire debug
         expression (usecount would have been incremented again
         otherwise), and the definition involves only constants and
         SSA names, then we can propagate VALUE into this single use,
         avoiding the temp.

         We can also avoid using a temp if VALUE can be shared and
         propagated into all uses, without generating expressions that
         wouldn't be valid gimple RHSs.

         Other cases that would require unsharing or non-gimple RHSs
         are deferred to a debug temp, although we could avoid temps
         at the expense of duplication of expressions.  */

      if (CONSTANT_CLASS_P (value)
          || gimple_code (def_stmt) == GIMPLE_PHI
          || (usecount == 1
              && (!gimple_assign_single_p (def_stmt)
                  || is_gimple_min_invariant (value)))
          || is_gimple_reg (value))
        ;
      else
        {
          gdebug *def_temp;
          tree vexpr = make_node (DEBUG_EXPR_DECL);

          def_temp = gimple_build_debug_bind (vexpr,
                                              unshare_expr (value),
                                              def_stmt);

          DECL_ARTIFICIAL (vexpr) = 1;
          TREE_TYPE (vexpr) = TREE_TYPE (value);
          if (DECL_P (value))
            SET_DECL_MODE (vexpr, DECL_MODE (value));
          else
            SET_DECL_MODE (vexpr, TYPE_MODE (TREE_TYPE (value)));

          if (gsi)
            gsi_insert_before (gsi, def_temp, GSI_SAME_STMT);
          else
            {
              gimple_stmt_iterator ngsi = gsi_for_stmt (def_stmt);
              gsi_insert_before (&ngsi, def_temp, GSI_SAME_STMT);
            }

          value = vexpr;
        }
    }

  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, var)
    {
      if (!gimple_debug_bind_p (stmt))
        continue;

      if (value)
        {
          FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
            /* unshare_expr is not needed here.  vexpr is either a
               SINGLE_RHS, that can be safely shared, some other RHS
               that was unshared when we found it had a single debug
               use, or a DEBUG_EXPR_DECL, that can be safely
               shared.  */
            SET_USE (use_p, unshare_expr (value));
          /* If we didn't replace uses with a debug decl fold the
             resulting expression.  Otherwise we end up with invalid IL.  */
          if (TREE_CODE (value) != DEBUG_EXPR_DECL)
            {
              gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
              fold_stmt_inplace (&gsi);
            }
        }
      else
        gimple_debug_bind_reset_value (stmt);

      update_stmt (stmt);
    }
}


/* Insert a DEBUG BIND stmt before STMT for each DEF referenced by
   other DEBUG stmts, and replace uses of the DEF with the
   newly-created debug temp.  */

void
insert_debug_temps_for_defs (gimple_stmt_iterator *gsi)
{
  gimple *stmt;
  ssa_op_iter op_iter;
  def_operand_p def_p;

  if (!MAY_HAVE_DEBUG_BIND_STMTS)
    return;

  stmt = gsi_stmt (*gsi);

  FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
    {
      tree var = DEF_FROM_PTR (def_p);

      if (TREE_CODE (var) != SSA_NAME)
        continue;

      insert_debug_temp_for_var_def (gsi, var);
    }
}

/* Reset all debug stmts that use SSA_NAME(s) defined in STMT.  */

void
reset_debug_uses (gimple *stmt)
{
  ssa_op_iter op_iter;
  def_operand_p def_p;
  imm_use_iterator imm_iter;
  gimple *use_stmt;

  if (!MAY_HAVE_DEBUG_BIND_STMTS)
    return;

  FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
    {
      tree var = DEF_FROM_PTR (def_p);

      if (TREE_CODE (var) != SSA_NAME)
        continue;

      FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, var)
        {
          if (!gimple_debug_bind_p (use_stmt))
            continue;

          gimple_debug_bind_reset_value (use_stmt);
          update_stmt (use_stmt);
        }
    }
}

/* Delete SSA DEFs for SSA versions in the TOREMOVE bitmap, removing
   dominated stmts before their dominators, so that release_ssa_defs
   stands a chance of propagating DEFs into debug bind stmts.  */

void
release_defs_bitset (bitmap toremove)
{
  unsigned j;
  bitmap_iterator bi;

  /* Performing a topological sort is probably overkill; this will
     most likely run in slightly superlinear time, rather than the
     pathological quadratic worst case.  */
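  /* Concretely, each pass of the loop below removes only those
     definitions whose uses are not themselves scheduled for removal.
     For example (illustrative names), with both b_2 and a_1 in
     TOREMOVE and a_1 = b_2 + 1, b_2 is deferred until a_1 has been
     removed, so that b_2 can still be propagated into any debug bind
     stmts that mentioned a_1.  */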
  while (!bitmap_empty_p (toremove))
    {
      unsigned to_remove_bit = -1U;
      EXECUTE_IF_SET_IN_BITMAP (toremove, 0, j, bi)
        {
          if (to_remove_bit != -1U)
            {
              bitmap_clear_bit (toremove, to_remove_bit);
              to_remove_bit = -1U;
            }

          bool remove_now = true;
          tree var = ssa_name (j);
          gimple *stmt;
          imm_use_iterator uit;

          FOR_EACH_IMM_USE_STMT (stmt, uit, var)
            {
              ssa_op_iter dit;
              def_operand_p def_p;

              /* We can't propagate PHI nodes into debug stmts.  */
              if (gimple_code (stmt) == GIMPLE_PHI
                  || is_gimple_debug (stmt))
                continue;

              /* If we find another definition to remove that uses
                 the one we're looking at, defer the removal of this
                 one, so that it can be propagated into debug stmts
                 after the other is.  */
              FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, dit, SSA_OP_DEF)
                {
                  tree odef = DEF_FROM_PTR (def_p);

                  if (bitmap_bit_p (toremove, SSA_NAME_VERSION (odef)))
                    {
                      remove_now = false;
                      break;
                    }
                }

              if (!remove_now)
                BREAK_FROM_IMM_USE_STMT (uit);
            }

          if (remove_now)
            {
              gimple *def = SSA_NAME_DEF_STMT (var);
              gimple_stmt_iterator gsi = gsi_for_stmt (def);

              if (gimple_code (def) == GIMPLE_PHI)
                remove_phi_node (&gsi, true);
              else
                {
                  gsi_remove (&gsi, true);
                  release_defs (def);
                }

              to_remove_bit = j;
            }
        }
      if (to_remove_bit != -1U)
        bitmap_clear_bit (toremove, to_remove_bit);
    }

}

/* Verify virtual SSA form.  */

bool
verify_vssa (basic_block bb, tree current_vdef, sbitmap visited)
{
  bool err = false;

  if (bitmap_bit_p (visited, bb->index))
    return false;

  bitmap_set_bit (visited, bb->index);

  /* Pick up the single virtual PHI def.  */
  gphi *phi = NULL;
  for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
       gsi_next (&si))
    {
      tree res = gimple_phi_result (si.phi ());
      if (virtual_operand_p (res))
        {
          if (phi)
            {
              error ("multiple virtual PHI nodes in BB %d", bb->index);
              print_gimple_stmt (stderr, phi, 0);
              print_gimple_stmt (stderr, si.phi (), 0);
              err = true;
            }
          else
            phi = si.phi ();
        }
    }
  if (phi)
    {
      current_vdef = gimple_phi_result (phi);
      if (TREE_CODE (current_vdef) != SSA_NAME)
        {
          error ("virtual definition is not an SSA name");
          print_gimple_stmt (stderr, phi, 0);
          err = true;
        }
    }

  /* Verify stmts.  */
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      tree vuse = gimple_vuse (stmt);
      if (vuse)
        {
          if (vuse != current_vdef)
            {
              error ("stmt with wrong VUSE");
              print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
              fprintf (stderr, "expected ");
              print_generic_expr (stderr, current_vdef);
              fprintf (stderr, "\n");
              err = true;
            }
          tree vdef = gimple_vdef (stmt);
          if (vdef)
            {
              current_vdef = vdef;
              if (TREE_CODE (current_vdef) != SSA_NAME)
                {
                  error ("virtual definition is not an SSA name");
                  print_gimple_stmt (stderr, phi, 0);
                  err = true;
                }
            }
        }
    }

  /* Verify destination PHI uses and recurse.  */
  edge_iterator ei;
  edge e;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      gphi *phi = get_virtual_phi (e->dest);
      if (phi
          && PHI_ARG_DEF_FROM_EDGE (phi, e) != current_vdef)
        {
          error ("PHI node with wrong VUSE on edge from BB %d",
                 e->src->index);
          print_gimple_stmt (stderr, phi, 0, TDF_VOPS);
          fprintf (stderr, "expected ");
          print_generic_expr (stderr, current_vdef);
          fprintf (stderr, "\n");
          err = true;
        }

      /* Recurse.  */
      err |= verify_vssa (e->dest, current_vdef, visited);
    }

  return err;
}

/* Return true if SSA_NAME is malformed and mark it visited.

   IS_VIRTUAL is true if this SSA_NAME was found inside a virtual
   operand.  */

static bool
verify_ssa_name (tree ssa_name, bool is_virtual)
{
  if (TREE_CODE (ssa_name) != SSA_NAME)
    {
      error ("expected an SSA_NAME object");
      return true;
    }

  if (SSA_NAME_IN_FREE_LIST (ssa_name))
    {
      error ("found an SSA_NAME that had been released into the free pool");
      return true;
    }

  if (SSA_NAME_VAR (ssa_name) != NULL_TREE
      && TREE_TYPE (ssa_name) != TREE_TYPE (SSA_NAME_VAR (ssa_name)))
    {
      error ("type mismatch between an SSA_NAME and its symbol");
      return true;
    }

  if (is_virtual && !virtual_operand_p (ssa_name))
    {
      error ("found a virtual definition for a GIMPLE register");
      return true;
    }

  if (is_virtual && SSA_NAME_VAR (ssa_name) != gimple_vop (cfun))
    {
      error ("virtual SSA name for non-VOP decl");
      return true;
    }

  if (!is_virtual && virtual_operand_p (ssa_name))
    {
      error ("found a real definition for a non-register");
      return true;
    }

  if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
      && !gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name)))
    {
      error ("found a default name with a non-empty defining statement");
      return true;
    }

  return false;
}


/* Return true if the definition of SSA_NAME at block BB is malformed.

   STMT is the statement where SSA_NAME is created.

   DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
   version numbers.  If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
   it means that the block in that array slot contains the
   definition of SSA_NAME.

   IS_VIRTUAL is true if SSA_NAME is created by a VDEF.  */

static bool
verify_def (basic_block bb, basic_block *definition_block, tree ssa_name,
            gimple *stmt, bool is_virtual)
{
  if (verify_ssa_name (ssa_name, is_virtual))
    goto err;

  if (SSA_NAME_VAR (ssa_name)
      && TREE_CODE (SSA_NAME_VAR (ssa_name)) == RESULT_DECL
      && DECL_BY_REFERENCE (SSA_NAME_VAR (ssa_name)))
    {
      error ("RESULT_DECL should be read only when DECL_BY_REFERENCE is set");
      goto err;
    }

  if (definition_block[SSA_NAME_VERSION (ssa_name)])
    {
      error ("SSA_NAME created in two different blocks %i and %i",
             definition_block[SSA_NAME_VERSION (ssa_name)]->index, bb->index);
      goto err;
    }

  definition_block[SSA_NAME_VERSION (ssa_name)] = bb;

  if (SSA_NAME_DEF_STMT (ssa_name) != stmt)
    {
      error ("SSA_NAME_DEF_STMT is wrong");
      fprintf (stderr, "Expected definition statement:\n");
      print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (ssa_name), 4, TDF_VOPS);
      fprintf (stderr, "\nActual definition statement:\n");
      print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
      goto err;
    }

  return false;

err:
  fprintf (stderr, "while verifying SSA_NAME ");
  print_generic_expr (stderr, ssa_name);
  fprintf (stderr, " in statement\n");
  print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);

  return true;
}


/* Return true if the use of SSA_NAME at statement STMT in block BB is
   malformed.

   DEF_BB is the block where SSA_NAME was found to be created.

   IDOM contains immediate dominator information for the flowgraph.

   CHECK_ABNORMAL is true if the caller wants to check whether this use
   is flowing through an abnormal edge (only used when checking PHI
   arguments).

   If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names
   that are defined before STMT in basic block BB.  */

static bool
verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
            gimple *stmt, bool check_abnormal, bitmap names_defined_in_bb)
{
  bool err = false;
  tree ssa_name = USE_FROM_PTR (use_p);

  if (!TREE_VISITED (ssa_name))
    if (verify_imm_links (stderr, ssa_name))
      err = true;

  TREE_VISITED (ssa_name) = 1;

  if (gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name))
      && SSA_NAME_IS_DEFAULT_DEF (ssa_name))
    ; /* Default definitions have empty statements.  Nothing to do.  */
  else if (!def_bb)
    {
      error ("missing definition");
      err = true;
    }
  else if (bb != def_bb
           && !dominated_by_p (CDI_DOMINATORS, bb, def_bb))
    {
      error ("definition in block %i does not dominate use in block %i",
             def_bb->index, bb->index);
      err = true;
    }
  else if (bb == def_bb
           && names_defined_in_bb != NULL
           && !bitmap_bit_p (names_defined_in_bb, SSA_NAME_VERSION (ssa_name)))
    {
      error ("definition in block %i follows the use", def_bb->index);
      err = true;
    }

  if (check_abnormal
      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
    {
      error ("SSA_NAME_OCCURS_IN_ABNORMAL_PHI should be set");
      err = true;
    }

  /* Make sure the use is in an appropriate list by checking the previous
     element to make sure it's the same.  */
  if (use_p->prev == NULL)
    {
      error ("no immediate_use list");
      err = true;
    }
  else
    {
      tree listvar;
      if (use_p->prev->use == NULL)
        listvar = use_p->prev->loc.ssa_name;
      else
        listvar = USE_FROM_PTR (use_p->prev);
      if (listvar != ssa_name)
        {
          error ("wrong immediate use list");
          err = true;
        }
    }

  if (err)
    {
      fprintf (stderr, "for SSA_NAME: ");
      print_generic_expr (stderr, ssa_name, TDF_VOPS);
      fprintf (stderr, " in statement:\n");
      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
    }

  return err;
}


/* Return true if any of the arguments for PHI node PHI at block BB is
   malformed.

   DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
   version numbers.  If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
   it means that the block in that array slot contains the
   definition of SSA_NAME.  */

static bool
verify_phi_args (gphi *phi, basic_block bb, basic_block *definition_block)
{
  edge e;
  bool err = false;
  size_t i, phi_num_args = gimple_phi_num_args (phi);

  if (EDGE_COUNT (bb->preds) != phi_num_args)
    {
      error ("incoming edge count does not match number of PHI arguments");
      err = true;
      goto error;
    }

  for (i = 0; i < phi_num_args; i++)
    {
      use_operand_p op_p = gimple_phi_arg_imm_use_ptr (phi, i);
      tree op = USE_FROM_PTR (op_p);

      e = EDGE_PRED (bb, i);

      if (op == NULL_TREE)
        {
          error ("PHI argument is missing for edge %d->%d",
                 e->src->index,
                 e->dest->index);
          err = true;
          goto error;
        }

      if (TREE_CODE (op) != SSA_NAME && !is_gimple_min_invariant (op))
        {
          error ("PHI argument is not SSA_NAME, or invariant");
          err = true;
        }

      if (TREE_CODE (op) == SSA_NAME)
        {
          err = verify_ssa_name (op, virtual_operand_p (gimple_phi_result (phi)));
          err |= verify_use (e->src, definition_block[SSA_NAME_VERSION (op)],
                             op_p, phi, e->flags & EDGE_ABNORMAL, NULL);
        }

      if (TREE_CODE (op) == ADDR_EXPR)
        {
          tree base = TREE_OPERAND (op, 0);
          while (handled_component_p (base))
            base = TREE_OPERAND (base, 0);
          if ((VAR_P (base)
               || TREE_CODE (base) == PARM_DECL
               || TREE_CODE (base) == RESULT_DECL)
              && !TREE_ADDRESSABLE (base))
            {
              error ("address taken, but ADDRESSABLE bit not set");
              err = true;
            }
        }

      if (e->dest != bb)
        {
          error ("wrong edge %d->%d for PHI argument",
                 e->src->index, e->dest->index);
          err = true;
        }

      if (err)
        {
          fprintf (stderr, "PHI argument\n");
          print_generic_stmt (stderr, op, TDF_VOPS);
          goto error;
        }
    }

error:
  if (err)
    {
      fprintf (stderr, "for PHI node\n");
      print_gimple_stmt (stderr, phi, 0, TDF_VOPS|TDF_MEMSYMS);
    }


  return err;
}


/* Verify common invariants in the SSA web.
   TODO: verify the variable annotations.  */

DEBUG_FUNCTION void
verify_ssa (bool check_modified_stmt, bool check_ssa_operands)
{
  basic_block bb;
  basic_block *definition_block = XCNEWVEC (basic_block, num_ssa_names);
  ssa_op_iter iter;
  tree op;
  enum dom_state orig_dom_state = dom_info_state (CDI_DOMINATORS);
  auto_bitmap names_defined_in_bb;

  gcc_assert (!need_ssa_update_p (cfun));

  timevar_push (TV_TREE_SSA_VERIFY);

  {
    /* Keep track of SSA names present in the IL.  */
    size_t i;
    tree name;
    hash_map <void *, tree> ssa_info;

    FOR_EACH_SSA_NAME (i, name, cfun)
      {
        gimple *stmt;
        TREE_VISITED (name) = 0;

        verify_ssa_name (name, virtual_operand_p (name));

        stmt = SSA_NAME_DEF_STMT (name);
        if (!gimple_nop_p (stmt))
          {
            basic_block bb = gimple_bb (stmt);
            if (verify_def (bb, definition_block,
                            name, stmt, virtual_operand_p (name)))
              goto err;
          }

        void *info = NULL;
        if (POINTER_TYPE_P (TREE_TYPE (name)))
          info = SSA_NAME_PTR_INFO (name);
        else if (INTEGRAL_TYPE_P (TREE_TYPE (name)))
          info = SSA_NAME_RANGE_INFO (name);
        if (info)
          {
            bool existed;
            tree &val = ssa_info.get_or_insert (info, &existed);
            if (existed)
              {
                error ("shared SSA name info");
                print_generic_expr (stderr, val);
                fprintf (stderr, " and ");
                print_generic_expr (stderr, name);
                fprintf (stderr, "\n");
                goto err;
              }
            else
              val = name;
          }
      }
  }

  calculate_dominance_info (CDI_DOMINATORS);

  /* Now verify all the uses and make sure they agree with the definitions
     found in the previous pass.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;

      /* Make sure that all edges have a clear 'aux' field.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          if (e->aux)
            {
              error ("AUX pointer initialized for edge %d->%d", e->src->index,
                     e->dest->index);
              goto err;
            }
        }

      /* Verify the arguments for every PHI node in the block.  */
      for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gphi *phi = gsi.phi ();
          if (verify_phi_args (phi, bb, definition_block))
            goto err;

          bitmap_set_bit (names_defined_in_bb,
                          SSA_NAME_VERSION (gimple_phi_result (phi)));
        }

      /* Now verify all the uses and vuses in every statement of the block.  */
      for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
           gsi_next (&gsi))
        {
          gimple *stmt = gsi_stmt (gsi);
          use_operand_p use_p;

          if (check_modified_stmt && gimple_modified_p (stmt))
            {
              error ("stmt (%p) marked modified after optimization pass: ",
                     (void *)stmt);
              print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
              goto err;
            }

          if (check_ssa_operands && verify_ssa_operands (cfun, stmt))
            {
              print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
              goto err;
            }

          if (gimple_debug_bind_p (stmt)
              && !gimple_debug_bind_has_value_p (stmt))
            continue;

          FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE|SSA_OP_VUSE)
            {
              op = USE_FROM_PTR (use_p);
              if (verify_use (bb, definition_block[SSA_NAME_VERSION (op)],
                              use_p, stmt, false, names_defined_in_bb))
                goto err;
            }

          FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_DEFS)
            {
              if (SSA_NAME_DEF_STMT (op) != stmt)
                {
                  error ("SSA_NAME_DEF_STMT is wrong");
                  fprintf (stderr, "Expected definition statement:\n");
                  print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
                  fprintf (stderr, "\nActual definition statement:\n");
                  print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (op),
                                     4, TDF_VOPS);
                  goto err;
                }
              bitmap_set_bit (names_defined_in_bb, SSA_NAME_VERSION (op));
            }
        }

      bitmap_clear (names_defined_in_bb);
    }

  free (definition_block);

  if (gimple_vop (cfun)
      && ssa_default_def (cfun, gimple_vop (cfun)))
    {
      auto_sbitmap visited (last_basic_block_for_fn (cfun) + 1);
      bitmap_clear (visited);
      if (verify_vssa (ENTRY_BLOCK_PTR_FOR_FN (cfun),
                       ssa_default_def (cfun, gimple_vop (cfun)), visited))
        goto err;
    }

  /* Restore the dominance information to its prior known state, so
     that we do not perturb the compiler's subsequent behavior.  */
  if (orig_dom_state == DOM_NONE)
    free_dominance_info (CDI_DOMINATORS);
  else
    set_dom_info_availability (CDI_DOMINATORS, orig_dom_state);

  timevar_pop (TV_TREE_SSA_VERIFY);
  return;

err:
  internal_error ("verify_ssa failed");
}


/* Initialize global DFA and SSA structures.  */

void
init_tree_ssa (struct function *fn)
{
  fn->gimple_df = ggc_cleared_alloc<gimple_df> ();
  fn->gimple_df->default_defs = hash_table<ssa_name_hasher>::create_ggc (20);
  pt_solution_reset (&fn->gimple_df->escaped);
  init_ssanames (fn, 0);
}

/* Deallocate memory associated with SSA data structures for FNDECL.  */

void
delete_tree_ssa (struct function *fn)
{
  fini_ssanames (fn);

  /* We no longer maintain the SSA operand cache at this point.  */
  if (ssa_operands_active (fn))
    fini_ssa_operands (fn);

  fn->gimple_df->default_defs->empty ();
  fn->gimple_df->default_defs = NULL;
  pt_solution_reset (&fn->gimple_df->escaped);
  if (fn->gimple_df->decls_to_pointers != NULL)
    delete fn->gimple_df->decls_to_pointers;
  fn->gimple_df->decls_to_pointers = NULL;
  fn->gimple_df = NULL;

  /* We no longer need the edge variable maps.  */
  redirect_edge_var_map_empty ();
}

/* Return true if EXPR is a useless type conversion, otherwise return
   false.  */

bool
tree_ssa_useless_type_conversion (tree expr)
{
  /* If we have an assignment that merely uses a NOP_EXPR to change
     the top of the RHS to the type of the LHS and the type conversion
     is "safe", then strip away the type conversion so that we can
     enter LHS = RHS into the const_and_copies table.  */
  if (CONVERT_EXPR_P (expr)
      || TREE_CODE (expr) == VIEW_CONVERT_EXPR
      || TREE_CODE (expr) == NON_LVALUE_EXPR)
    return useless_type_conversion_p
      (TREE_TYPE (expr),
       TREE_TYPE (TREE_OPERAND (expr, 0)));

  return false;
}

/* Strip conversions from EXP according to
   tree_ssa_useless_type_conversion and return the resulting
   expression.  */

tree
tree_ssa_strip_useless_type_conversions (tree exp)
{
  while (tree_ssa_useless_type_conversion (exp))
    exp = TREE_OPERAND (exp, 0);
  return exp;
}

/* Return true if T, as SSA_NAME, has an implicit default defined value.  */
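
/* For instance (illustrative), the default definition of a PARM_DECL or
   of a DECL_BY_REFERENCE RESULT_DECL has an implicit value (the incoming
   argument resp. the hidden return slot pointer), as does a hard register
   variable; the default definition of an ordinary local does not.  */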

bool
ssa_defined_default_def_p (tree t)
{
  tree var = SSA_NAME_VAR (t);

  if (!var)
    ;
  /* Parameters get their initial value from the function entry.  */
  else if (TREE_CODE (var) == PARM_DECL)
    return true;
  /* When returning by reference the return address is actually a hidden
     parameter.  */
  else if (TREE_CODE (var) == RESULT_DECL && DECL_BY_REFERENCE (var))
    return true;
  /* Hard register variables get their initial value from the ether.  */
  else if (VAR_P (var) && DECL_HARD_REGISTER (var))
    return true;

  return false;
}


/* Return true if T, an SSA_NAME, has an undefined value.  PARTIAL is what
   should be returned if the value is only partially undefined.  */

bool
ssa_undefined_value_p (tree t, bool partial)
{
  gimple *def_stmt;

  if (ssa_defined_default_def_p (t))
    return false;

  /* The value is undefined iff its definition statement is empty.  */
  def_stmt = SSA_NAME_DEF_STMT (t);
  if (gimple_nop_p (def_stmt))
    return true;

  /* Check whether the complex value was only partially defined.  */
  if (partial && is_gimple_assign (def_stmt)
      && gimple_assign_rhs_code (def_stmt) == COMPLEX_EXPR)
    {
      tree rhs1, rhs2;

      rhs1 = gimple_assign_rhs1 (def_stmt);
      rhs2 = gimple_assign_rhs2 (def_stmt);
      return (TREE_CODE (rhs1) == SSA_NAME && ssa_undefined_value_p (rhs1))
             || (TREE_CODE (rhs2) == SSA_NAME && ssa_undefined_value_p (rhs2));
    }
  return false;
}


/* Return TRUE iff STMT, a gimple statement, references an undefined
   SSA name.  */

bool
gimple_uses_undefined_value_p (gimple *stmt)
{
  ssa_op_iter iter;
  tree op;

  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
    if (ssa_undefined_value_p (op))
      return true;

  return false;
}



/* If necessary, rewrite the base of the reference tree *TP from
   a MEM_REF to a plain or converted symbol.  */
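
/* For example (illustrative trees): a load MEM[(int *)&a, 0] from a
   non-addressable variable 'a' becomes plain 'a' when the types agree,
   or VIEW_CONVERT_EXPR<type>(a) when only the sizes match; vector and
   complex bases are handled via BIT_FIELD_REF resp.
   REALPART_EXPR/IMAGPART_EXPR in the cases below.  */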

static void
maybe_rewrite_mem_ref_base (tree *tp, bitmap suitable_for_renaming)
{
  tree sym;

  while (handled_component_p (*tp))
    tp = &TREE_OPERAND (*tp, 0);
  if (TREE_CODE (*tp) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*tp, 0)) == ADDR_EXPR
      && (sym = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0))
      && DECL_P (sym)
      && !TREE_ADDRESSABLE (sym)
      && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym))
      && is_gimple_reg_type (TREE_TYPE (*tp))
      && ! VOID_TYPE_P (TREE_TYPE (*tp)))
    {
      if (TREE_CODE (TREE_TYPE (sym)) == VECTOR_TYPE
          && useless_type_conversion_p (TREE_TYPE (*tp),
                                        TREE_TYPE (TREE_TYPE (sym)))
          && multiple_of_p (sizetype, TREE_OPERAND (*tp, 1),
                            TYPE_SIZE_UNIT (TREE_TYPE (*tp))))
        {
          *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym,
                        TYPE_SIZE (TREE_TYPE (*tp)),
                        int_const_binop (MULT_EXPR,
                                         bitsize_int (BITS_PER_UNIT),
                                         TREE_OPERAND (*tp, 1)));
        }
      else if (TREE_CODE (TREE_TYPE (sym)) == COMPLEX_TYPE
               && useless_type_conversion_p (TREE_TYPE (*tp),
                                             TREE_TYPE (TREE_TYPE (sym))))
        {
          *tp = build1 (integer_zerop (TREE_OPERAND (*tp, 1))
                        ? REALPART_EXPR : IMAGPART_EXPR,
                        TREE_TYPE (*tp), sym);
        }
      else if (integer_zerop (TREE_OPERAND (*tp, 1))
               && DECL_SIZE (sym) == TYPE_SIZE (TREE_TYPE (*tp)))
        {
          if (!useless_type_conversion_p (TREE_TYPE (*tp),
                                          TREE_TYPE (sym)))
            *tp = build1 (VIEW_CONVERT_EXPR,
                          TREE_TYPE (*tp), sym);
          else
            *tp = sym;
        }
      else if (DECL_SIZE (sym)
               && TREE_CODE (DECL_SIZE (sym)) == INTEGER_CST
               && (known_subrange_p
                   (mem_ref_offset (*tp),
                    wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (*tp))),
                    0, wi::to_offset (DECL_SIZE_UNIT (sym))))
               && (! INTEGRAL_TYPE_P (TREE_TYPE (*tp))
                   || (wi::to_offset (TYPE_SIZE (TREE_TYPE (*tp)))
                       == TYPE_PRECISION (TREE_TYPE (*tp))))
               && wi::umod_trunc (wi::to_offset (TYPE_SIZE (TREE_TYPE (*tp))),
                                  BITS_PER_UNIT) == 0)
        {
          *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym,
                        TYPE_SIZE (TREE_TYPE (*tp)),
                        wide_int_to_tree (bitsizetype,
                                          mem_ref_offset (*tp)
                                          << LOG2_BITS_PER_UNIT));
        }
    }
}

/* For a tree REF return its base if it is the base of a MEM_REF
   that cannot be rewritten into SSA form.  Otherwise return NULL_TREE.  */

static tree
non_rewritable_mem_ref_base (tree ref)
{
  tree base;

  /* A plain decl does not need it set.  */
/* For a tree REF return its base if it is the base of a MEM_REF
   that cannot be rewritten into SSA form.  Otherwise return NULL_TREE.  */

static tree
non_rewritable_mem_ref_base (tree ref)
{
  tree base;

  /* A plain decl does not need it set.  */
  if (DECL_P (ref))
    return NULL_TREE;

  if (! (base = CONST_CAST_TREE (strip_invariant_refs (ref))))
    {
      base = get_base_address (ref);
      if (DECL_P (base))
        return base;
      return NULL_TREE;
    }

  /* But watch out for MEM_REFs we cannot lower to a
     VIEW_CONVERT_EXPR or a BIT_FIELD_REF.  */
  if (TREE_CODE (base) == MEM_REF
      && TREE_CODE (TREE_OPERAND (base, 0)) == ADDR_EXPR)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (base, 0), 0);
      if (! DECL_P (decl))
        return NULL_TREE;
      if (! is_gimple_reg_type (TREE_TYPE (base))
          || VOID_TYPE_P (TREE_TYPE (base))
          || TREE_THIS_VOLATILE (decl) != TREE_THIS_VOLATILE (base))
        return decl;
      if ((TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE
           || TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE)
          && useless_type_conversion_p (TREE_TYPE (base),
                                        TREE_TYPE (TREE_TYPE (decl)))
          && known_ge (mem_ref_offset (base), 0)
          && known_gt (wi::to_poly_offset (TYPE_SIZE_UNIT (TREE_TYPE (decl))),
                       mem_ref_offset (base))
          && multiple_of_p (sizetype, TREE_OPERAND (base, 1),
                            TYPE_SIZE_UNIT (TREE_TYPE (base))))
        return NULL_TREE;
      /* For same sizes and zero offset we can use a VIEW_CONVERT_EXPR.  */
      if (integer_zerop (TREE_OPERAND (base, 1))
          && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (base)))
        return NULL_TREE;
      /* For integral typed extracts we can use a BIT_FIELD_REF.  */
      if (DECL_SIZE (decl)
          && (known_subrange_p
              (mem_ref_offset (base),
               wi::to_poly_offset (TYPE_SIZE_UNIT (TREE_TYPE (base))),
               0, wi::to_poly_offset (DECL_SIZE_UNIT (decl))))
          /* ??? We can't handle bitfield precision extracts without
             either using an alternate type for the BIT_FIELD_REF and
             then doing a conversion or possibly adjusting the offset
             according to endianness.  */
          && (! INTEGRAL_TYPE_P (TREE_TYPE (base))
              || (wi::to_offset (TYPE_SIZE (TREE_TYPE (base)))
                  == TYPE_PRECISION (TREE_TYPE (base))))
          && wi::umod_trunc (wi::to_offset (TYPE_SIZE (TREE_TYPE (base))),
                             BITS_PER_UNIT) == 0)
        return NULL_TREE;
      return decl;
    }

  return NULL_TREE;
}
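
/* Illustrative sketch (not part of the original source): given

     char c;
     ... = MEM[(int *)&c];

   the access extends past the declared object, so neither the
   VIEW_CONVERT_EXPR nor the BIT_FIELD_REF lowering mentioned above applies
   and the decl c is returned, which keeps it from being renamed into SSA
   form.  Accesses that merely reinterpret the bytes of a same-sized decl
   return NULL_TREE instead.  */
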
/* For an lvalue tree LHS return true if it cannot be rewritten into SSA form.
   Otherwise return false.  */

static bool
non_rewritable_lvalue_p (tree lhs)
{
  /* A plain decl is always rewritable.  */
  if (DECL_P (lhs))
    return false;

  /* We can re-write REALPART_EXPR and IMAGPART_EXPR sets in
     a reasonably efficient manner.  */
  if ((TREE_CODE (lhs) == REALPART_EXPR
       || TREE_CODE (lhs) == IMAGPART_EXPR)
      && DECL_P (TREE_OPERAND (lhs, 0)))
    return false;

  /* ??? The following could be relaxed allowing component
     references that do not change the access size.  */
  if (TREE_CODE (lhs) == MEM_REF
      && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0);

      /* A decl that is wrapped inside a MEM_REF that covers
         it in full is also rewritable.  */
      if (integer_zerop (TREE_OPERAND (lhs, 1))
          && DECL_P (decl)
          && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (lhs))
          /* If the dynamic type of the decl has larger precision than
             the decl itself we can't use the decl's type for SSA rewriting.  */
          && ((! INTEGRAL_TYPE_P (TREE_TYPE (decl))
               || compare_tree_int (DECL_SIZE (decl),
                                    TYPE_PRECISION (TREE_TYPE (decl))) == 0)
              || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
                  && (TYPE_PRECISION (TREE_TYPE (decl))
                      >= TYPE_PRECISION (TREE_TYPE (lhs)))))
          /* Make sure we are not re-writing non-float copying into float
             copying as that can incur normalization.  */
          && (! FLOAT_TYPE_P (TREE_TYPE (decl))
              || types_compatible_p (TREE_TYPE (lhs), TREE_TYPE (decl)))
          && (TREE_THIS_VOLATILE (decl) == TREE_THIS_VOLATILE (lhs)))
        return false;

      /* A vector-insert using a MEM_REF or ARRAY_REF is rewritable
         using a BIT_INSERT_EXPR.  */
      if (DECL_P (decl)
          && VECTOR_TYPE_P (TREE_TYPE (decl))
          && TYPE_MODE (TREE_TYPE (decl)) != BLKmode
          && operand_equal_p (TYPE_SIZE_UNIT (TREE_TYPE (lhs)),
                              TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))), 0)
          && known_ge (mem_ref_offset (lhs), 0)
          && known_gt (wi::to_poly_offset (TYPE_SIZE_UNIT (TREE_TYPE (decl))),
                       mem_ref_offset (lhs))
          && multiple_of_p (sizetype, TREE_OPERAND (lhs, 1),
                            TYPE_SIZE_UNIT (TREE_TYPE (lhs))))
        return false;
    }

  /* A vector-insert using a BIT_FIELD_REF is rewritable using
     BIT_INSERT_EXPR.  */
  if (TREE_CODE (lhs) == BIT_FIELD_REF
      && DECL_P (TREE_OPERAND (lhs, 0))
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (lhs, 0)))
      && TYPE_MODE (TREE_TYPE (TREE_OPERAND (lhs, 0))) != BLKmode
      && operand_equal_p (TYPE_SIZE_UNIT (TREE_TYPE (lhs)),
                          TYPE_SIZE_UNIT
                            (TREE_TYPE (TREE_TYPE (TREE_OPERAND (lhs, 0)))), 0)
      && (tree_to_uhwi (TREE_OPERAND (lhs, 2))
          % tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs)))) == 0)
    return false;

  return true;
}
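
/* Illustrative sketch (not part of the original source): assuming a local
   vector "v4si v", a store like

     MEM[(int *)&v + 4B] = x_1;

   is still rewritable because it can be expressed as a BIT_INSERT_EXPR into
   v, whereas a store through a reference of a different size keeps v from
   becoming a GIMPLE register.  */
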
/* When possible, clear the TREE_ADDRESSABLE bit or set the DECL_GIMPLE_REG_P
   bit on VAR and mark it for conversion into SSA by recording it in
   SUITABLE_FOR_RENAMING.  */

static void
maybe_optimize_var (tree var, bitmap addresses_taken, bitmap not_reg_needs,
                    bitmap suitable_for_renaming)
{
  /* Global variables and result decls cannot be changed.  */
  if (is_global_var (var)
      || TREE_CODE (var) == RESULT_DECL
      || bitmap_bit_p (addresses_taken, DECL_UID (var)))
    return;

  if (TREE_ADDRESSABLE (var)
      /* Do not change TREE_ADDRESSABLE if we need to preserve var as
         a non-register.  Otherwise we are confused and forget to
         add virtual operands for it.  */
      && (!is_gimple_reg_type (TREE_TYPE (var))
          || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
          || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
          || !bitmap_bit_p (not_reg_needs, DECL_UID (var))))
    {
      TREE_ADDRESSABLE (var) = 0;
      if (is_gimple_reg (var))
        bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
      if (dump_file)
        {
          fprintf (dump_file, "No longer having address taken: ");
          print_generic_expr (dump_file, var);
          fprintf (dump_file, "\n");
        }
    }

  if (!DECL_GIMPLE_REG_P (var)
      && !bitmap_bit_p (not_reg_needs, DECL_UID (var))
      && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
          || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
      && !TREE_THIS_VOLATILE (var)
      && (!VAR_P (var) || !DECL_HARD_REGISTER (var)))
    {
      DECL_GIMPLE_REG_P (var) = 1;
      bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
      if (dump_file)
        {
          fprintf (dump_file, "Now a gimple register: ");
          print_generic_expr (dump_file, var);
          fprintf (dump_file, "\n");
        }
    }
}
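
/* Illustrative note (not part of the original source): a local
   "_Complex float z" whose address never escapes is handled here by setting
   DECL_GIMPLE_REG_P, after which the tree dumps report
   "Now a gimple register: z" and later renaming can put z into SSA form;
   a variable whose only address-taking statements have been removed loses
   its TREE_ADDRESSABLE flag the same way ("No longer having address
   taken: ...").  */
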
/* Return true when STMT is an ASAN_MARK internal call whose second argument
   is the address of a local variable that could otherwise become a GIMPLE
   register.  */

static bool
is_asan_mark_p (gimple *stmt)
{
  if (!gimple_call_internal_p (stmt, IFN_ASAN_MARK))
    return false;

  tree addr = get_base_address (gimple_call_arg (stmt, 1));
  if (TREE_CODE (addr) == ADDR_EXPR
      && VAR_P (TREE_OPERAND (addr, 0)))
    {
      tree var = TREE_OPERAND (addr, 0);
      if (lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
                            DECL_ATTRIBUTES (var)))
        return false;

      unsigned addressable = TREE_ADDRESSABLE (var);
      TREE_ADDRESSABLE (var) = 0;
      bool r = is_gimple_reg (var);
      TREE_ADDRESSABLE (var) = addressable;
      return r;
    }

  return false;
}
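
/* Illustrative sketch (not part of the original source): with
   -fsanitize=address -fsanitize-address-use-after-scope the gimplifier
   brackets the lifetime of a local x with internal calls of the form

     ASAN_MARK (UNPOISON, &x, 4);
     ...
     ASAN_MARK (POISON, &x, 4);

   (shown here schematically).  Treating those &x operands as real
   address-takings would needlessly keep x out of SSA form, so such calls
   are recognized above and handled specially below.  */
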
/* Compute TREE_ADDRESSABLE and DECL_GIMPLE_REG_P for local variables.  */

void
execute_update_addresses_taken (void)
{
  basic_block bb;
  auto_bitmap addresses_taken;
  auto_bitmap not_reg_needs;
  auto_bitmap suitable_for_renaming;
  tree var;
  unsigned i;

  timevar_push (TV_ADDRESS_TAKEN);

  /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
     the function body.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
           gsi_next (&gsi))
        {
          gimple *stmt = gsi_stmt (gsi);
          enum gimple_code code = gimple_code (stmt);
          tree decl;

          if (code == GIMPLE_CALL)
            {
              if (optimize_atomic_compare_exchange_p (stmt))
                {
                  /* For __atomic_compare_exchange_N if the second argument
                     is &var, don't mark var addressable;
                     if it becomes non-addressable, we'll rewrite it into
                     an ATOMIC_COMPARE_EXCHANGE call.  */
                  tree arg = gimple_call_arg (stmt, 1);
                  gimple_call_set_arg (stmt, 1, null_pointer_node);
                  gimple_ior_addresses_taken (addresses_taken, stmt);
                  gimple_call_set_arg (stmt, 1, arg);
                }
              else if (is_asan_mark_p (stmt)
                       || gimple_call_internal_p (stmt, IFN_GOMP_SIMT_ENTER))
                ;
              else
                gimple_ior_addresses_taken (addresses_taken, stmt);
            }
          else
            /* Note all addresses taken by the stmt.  */
            gimple_ior_addresses_taken (addresses_taken, stmt);

          /* If we have a call or an assignment, see if the lhs contains
             a local decl that must not become a gimple register.  */
          if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
            {
              tree lhs = gimple_get_lhs (stmt);
              if (lhs
                  && TREE_CODE (lhs) != SSA_NAME
                  && ((code == GIMPLE_CALL && ! DECL_P (lhs))
                      || non_rewritable_lvalue_p (lhs)))
                {
                  decl = get_base_address (lhs);
                  if (DECL_P (decl))
                    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
                }
            }

          if (gimple_assign_single_p (stmt))
            {
              tree rhs = gimple_assign_rhs1 (stmt);
              if ((decl = non_rewritable_mem_ref_base (rhs)))
                bitmap_set_bit (not_reg_needs, DECL_UID (decl));
            }

          else if (code == GIMPLE_CALL)
            {
              for (i = 0; i < gimple_call_num_args (stmt); ++i)
                {
                  tree arg = gimple_call_arg (stmt, i);
                  if ((decl = non_rewritable_mem_ref_base (arg)))
                    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
                }
            }

          else if (code == GIMPLE_ASM)
            {
              gasm *asm_stmt = as_a <gasm *> (stmt);
              for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
                {
                  tree link = gimple_asm_output_op (asm_stmt, i);
                  tree lhs = TREE_VALUE (link);
                  if (TREE_CODE (lhs) != SSA_NAME)
                    {
                      decl = get_base_address (lhs);
                      if (DECL_P (decl)
                          && (non_rewritable_lvalue_p (lhs)
                              /* We cannot move required conversions from
                                 the lhs to the rhs in asm statements, so
                                 require we do not need any.  */
                              || !useless_type_conversion_p
                                    (TREE_TYPE (lhs), TREE_TYPE (decl))))
                        bitmap_set_bit (not_reg_needs, DECL_UID (decl));
                    }
                }
              for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
                {
                  tree link = gimple_asm_input_op (asm_stmt, i);
                  if ((decl = non_rewritable_mem_ref_base (TREE_VALUE (link))))
                    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
                }
            }
        }

      for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
           gsi_next (&gsi))
        {
          size_t i;
          gphi *phi = gsi.phi ();

          for (i = 0; i < gimple_phi_num_args (phi); i++)
            {
              tree op = PHI_ARG_DEF (phi, i), var;
              if (TREE_CODE (op) == ADDR_EXPR
                  && (var = get_base_address (TREE_OPERAND (op, 0))) != NULL
                  && DECL_P (var))
                bitmap_set_bit (addresses_taken, DECL_UID (var));
            }
        }
    }

  /* We cannot iterate over all referenced vars because that list can contain
     unused vars from BLOCK trees, which would cause code generation
     differences for -g vs. -g0.  */
  for (var = DECL_ARGUMENTS (cfun->decl); var; var = DECL_CHAIN (var))
    maybe_optimize_var (var, addresses_taken, not_reg_needs,
                        suitable_for_renaming);

  FOR_EACH_VEC_SAFE_ELT (cfun->local_decls, i, var)
    maybe_optimize_var (var, addresses_taken, not_reg_needs,
                        suitable_for_renaming);

  /* Operand caches need to be recomputed for operands referencing the updated
     variables and operands need to be rewritten to expose bare symbols.  */
  if (!bitmap_empty_p (suitable_for_renaming))
    {
      FOR_EACH_BB_FN (bb, cfun)
        for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
          {
            gimple *stmt = gsi_stmt (gsi);

            /* Re-write TARGET_MEM_REFs of symbols we want to
               rewrite into SSA form.  */
            if (gimple_assign_single_p (stmt))
              {
                tree lhs = gimple_assign_lhs (stmt);
                tree rhs, *rhsp = gimple_assign_rhs1_ptr (stmt);
                tree sym;

                /* Rewrite LHS IMAG/REALPART_EXPR similar to
                   gimplify_modify_expr_complex_part.  */
                if ((TREE_CODE (lhs) == IMAGPART_EXPR
                     || TREE_CODE (lhs) == REALPART_EXPR)
                    && DECL_P (TREE_OPERAND (lhs, 0))
                    && bitmap_bit_p (suitable_for_renaming,
                                     DECL_UID (TREE_OPERAND (lhs, 0))))
                  {
                    tree other = make_ssa_name (TREE_TYPE (lhs));
                    tree lrhs = build1 (TREE_CODE (lhs) == IMAGPART_EXPR
                                        ? REALPART_EXPR : IMAGPART_EXPR,
                                        TREE_TYPE (other),
                                        TREE_OPERAND (lhs, 0));
                    gimple *load = gimple_build_assign (other, lrhs);
                    location_t loc = gimple_location (stmt);
                    gimple_set_location (load, loc);
                    gimple_set_vuse (load, gimple_vuse (stmt));
                    gsi_insert_before (&gsi, load, GSI_SAME_STMT);
                    gimple_assign_set_lhs (stmt, TREE_OPERAND (lhs, 0));
                    gimple_assign_set_rhs_with_ops
                      (&gsi, COMPLEX_EXPR,
                       TREE_CODE (lhs) == IMAGPART_EXPR
                       ? other : gimple_assign_rhs1 (stmt),
                       TREE_CODE (lhs) == IMAGPART_EXPR
                       ? gimple_assign_rhs1 (stmt) : other, NULL_TREE);
                    stmt = gsi_stmt (gsi);
                    unlink_stmt_vdef (stmt);
                    update_stmt (stmt);
                    continue;
                  }
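
                /* Illustrative sketch (not part of the original source):
                   the block above turns a partial store such as

                     REALPART_EXPR <c> = x_1;

                   into

                     _2 = IMAGPART_EXPR <c>;
                     c = COMPLEX_EXPR <x_1, _2>;

                   mirroring gimplify_modify_expr_complex_part, so that the
                   whole of c is written at once and c can be renamed into
                   SSA form.  */
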
                /* Rewrite a vector insert via a BIT_FIELD_REF on the LHS
                   into a BIT_INSERT_EXPR.  */
                if (TREE_CODE (lhs) == BIT_FIELD_REF
                    && DECL_P (TREE_OPERAND (lhs, 0))
                    && bitmap_bit_p (suitable_for_renaming,
                                     DECL_UID (TREE_OPERAND (lhs, 0)))
                    && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (lhs, 0)))
                    && TYPE_MODE (TREE_TYPE (TREE_OPERAND (lhs, 0))) != BLKmode
                    && operand_equal_p (TYPE_SIZE_UNIT (TREE_TYPE (lhs)),
                                        TYPE_SIZE_UNIT
                                          (TREE_TYPE (TREE_TYPE
                                                        (TREE_OPERAND (lhs, 0)))),
                                        0)
                    && (tree_to_uhwi (TREE_OPERAND (lhs, 2))
                        % tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs))) == 0))
                  {
                    tree var = TREE_OPERAND (lhs, 0);
                    tree val = gimple_assign_rhs1 (stmt);
                    if (! types_compatible_p (TREE_TYPE (TREE_TYPE (var)),
                                              TREE_TYPE (val)))
                      {
                        tree tem = make_ssa_name (TREE_TYPE (TREE_TYPE (var)));
                        gimple *pun
                          = gimple_build_assign (tem,
                                                 build1 (VIEW_CONVERT_EXPR,
                                                         TREE_TYPE (tem), val));
                        gsi_insert_before (&gsi, pun, GSI_SAME_STMT);
                        val = tem;
                      }
                    tree bitpos = TREE_OPERAND (lhs, 2);
                    gimple_assign_set_lhs (stmt, var);
                    gimple_assign_set_rhs_with_ops
                      (&gsi, BIT_INSERT_EXPR, var, val, bitpos);
                    stmt = gsi_stmt (gsi);
                    unlink_stmt_vdef (stmt);
                    update_stmt (stmt);
                    continue;
                  }
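
                /* Illustrative sketch (not part of the original source):
                   for a renamable vector "v4si v" the block above turns

                     BIT_FIELD_REF <v, 32, 64> = x_1;

                   into

                     v = BIT_INSERT_EXPR <v, x_1, 64>;

                   view-converting the inserted value first if its type does
                   not match the vector element type.  */
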
                /* Rewrite a vector insert using a MEM_REF on the LHS
                   into a BIT_INSERT_EXPR.  */
                if (TREE_CODE (lhs) == MEM_REF
                    && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
                    && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
                    && DECL_P (sym)
                    && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym))
                    && VECTOR_TYPE_P (TREE_TYPE (sym))
                    && TYPE_MODE (TREE_TYPE (sym)) != BLKmode
                    && operand_equal_p (TYPE_SIZE_UNIT (TREE_TYPE (lhs)),
                                        TYPE_SIZE_UNIT
                                          (TREE_TYPE (TREE_TYPE (sym))), 0)
                    && tree_fits_uhwi_p (TREE_OPERAND (lhs, 1))
                    && tree_int_cst_lt (TREE_OPERAND (lhs, 1),
                                        TYPE_SIZE_UNIT (TREE_TYPE (sym)))
                    && (tree_to_uhwi (TREE_OPERAND (lhs, 1))
                        % tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (lhs)))) == 0)
                  {
                    tree val = gimple_assign_rhs1 (stmt);
                    if (! types_compatible_p (TREE_TYPE (val),
                                              TREE_TYPE (TREE_TYPE (sym))))
                      {
                        tree tem = make_ssa_name (TREE_TYPE (TREE_TYPE (sym)));
                        gimple *pun
                          = gimple_build_assign (tem,
                                                 build1 (VIEW_CONVERT_EXPR,
                                                         TREE_TYPE (tem), val));
                        gsi_insert_before (&gsi, pun, GSI_SAME_STMT);
                        val = tem;
                      }
                    tree bitpos
                      = wide_int_to_tree (bitsizetype,
                                          mem_ref_offset (lhs) * BITS_PER_UNIT);
                    gimple_assign_set_lhs (stmt, sym);
                    gimple_assign_set_rhs_with_ops
                      (&gsi, BIT_INSERT_EXPR, sym, val, bitpos);
                    stmt = gsi_stmt (gsi);
                    unlink_stmt_vdef (stmt);
                    update_stmt (stmt);
                    continue;
                  }

                /* We shouldn't have any fancy wrapping of
                   component-refs on the LHS, but look through
                   VIEW_CONVERT_EXPRs as that is easy.  */
                while (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
                  lhs = TREE_OPERAND (lhs, 0);
                if (TREE_CODE (lhs) == MEM_REF
                    && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
                    && integer_zerop (TREE_OPERAND (lhs, 1))
                    && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
                    && DECL_P (sym)
                    && !TREE_ADDRESSABLE (sym)
                    && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym)))
                  lhs = sym;
                else
                  lhs = gimple_assign_lhs (stmt);

                /* Rewrite the RHS and make sure the resulting assignment
                   is validly typed.  */
                maybe_rewrite_mem_ref_base (rhsp, suitable_for_renaming);
                rhs = gimple_assign_rhs1 (stmt);
                if (gimple_assign_lhs (stmt) != lhs
                    && !useless_type_conversion_p (TREE_TYPE (lhs),
                                                   TREE_TYPE (rhs)))
                  {
                    if (gimple_clobber_p (stmt))
                      {
                        rhs = build_constructor (TREE_TYPE (lhs), NULL);
                        TREE_THIS_VOLATILE (rhs) = 1;
                      }
                    else
                      rhs = fold_build1 (VIEW_CONVERT_EXPR,
                                         TREE_TYPE (lhs), rhs);
                  }
                if (gimple_assign_lhs (stmt) != lhs)
                  gimple_assign_set_lhs (stmt, lhs);

                if (gimple_assign_rhs1 (stmt) != rhs)
                  {
                    gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
                    gimple_assign_set_rhs_from_tree (&gsi, rhs);
                  }
              }

            else if (gimple_code (stmt) == GIMPLE_CALL)
              {
                unsigned i;
                if (optimize_atomic_compare_exchange_p (stmt))
                  {
                    tree expected = gimple_call_arg (stmt, 1);
                    if (bitmap_bit_p (suitable_for_renaming,
                                      DECL_UID (TREE_OPERAND (expected, 0))))
                      {
                        fold_builtin_atomic_compare_exchange (&gsi);
                        continue;
                      }
                  }
                else if (is_asan_mark_p (stmt))
                  {
                    tree var = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
                    if (bitmap_bit_p (suitable_for_renaming, DECL_UID (var)))
                      {
                        unlink_stmt_vdef (stmt);
                        if (asan_mark_p (stmt, ASAN_MARK_POISON))
                          {
                            gcall *call
                              = gimple_build_call_internal (IFN_ASAN_POISON, 0);
                            gimple_call_set_lhs (call, var);
                            gsi_replace (&gsi, call, GSI_SAME_STMT);
                          }
                        else
                          {
                            /* In ASAN_MARK (UNPOISON, &b, ...) the variable
                               is uninitialized.  Avoid dependencies on its
                               previous out-of-scope value.  */
                            tree clobber
                              = build_constructor (TREE_TYPE (var), NULL);
                            TREE_THIS_VOLATILE (clobber) = 1;
                            gimple *g = gimple_build_assign (var, clobber);
                            gsi_replace (&gsi, g, GSI_SAME_STMT);
                          }
                        continue;
                      }
                  }
                else if (gimple_call_internal_p (stmt, IFN_GOMP_SIMT_ENTER))
                  for (i = 1; i < gimple_call_num_args (stmt); i++)
                    {
                      tree *argp = gimple_call_arg_ptr (stmt, i);
                      if (*argp == null_pointer_node)
                        continue;
                      gcc_assert (TREE_CODE (*argp) == ADDR_EXPR
                                  && VAR_P (TREE_OPERAND (*argp, 0)));
                      tree var = TREE_OPERAND (*argp, 0);
                      if (bitmap_bit_p (suitable_for_renaming, DECL_UID (var)))
                        *argp = null_pointer_node;
                    }
                for (i = 0; i < gimple_call_num_args (stmt); ++i)
                  {
                    tree *argp = gimple_call_arg_ptr (stmt, i);
                    maybe_rewrite_mem_ref_base (argp, suitable_for_renaming);
                  }
              }

            else if (gimple_code (stmt) == GIMPLE_ASM)
              {
                gasm *asm_stmt = as_a <gasm *> (stmt);
                unsigned i;
                for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
                  {
                    tree link = gimple_asm_output_op (asm_stmt, i);
                    maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
                                                suitable_for_renaming);
                  }
                for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
                  {
                    tree link = gimple_asm_input_op (asm_stmt, i);
                    maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
                                                suitable_for_renaming);
                  }
              }

            else if (gimple_debug_bind_p (stmt)
                     && gimple_debug_bind_has_value_p (stmt))
              {
                tree *valuep = gimple_debug_bind_get_value_ptr (stmt);
                tree decl;
                maybe_rewrite_mem_ref_base (valuep, suitable_for_renaming);
                decl = non_rewritable_mem_ref_base (*valuep);
                if (decl
                    && bitmap_bit_p (suitable_for_renaming, DECL_UID (decl)))
                  gimple_debug_bind_reset_value (stmt);
              }

            if (gimple_references_memory_p (stmt)
                || is_gimple_debug (stmt))
              update_stmt (stmt);

            gsi_next (&gsi);
          }

      /* Update the SSA form here since we are also called outside of a
         pass.  */
      if (number_of_loops (cfun) > 1
          && loops_state_satisfies_p (LOOP_CLOSED_SSA))
        rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
      else
        update_ssa (TODO_update_ssa);
    }

  timevar_pop (TV_ADDRESS_TAKEN);
}

namespace {

const pass_data pass_data_update_address_taken =
{
  GIMPLE_PASS, /* type */
  "addressables", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_ADDRESS_TAKEN, /* tv_id */
  PROP_ssa, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_address_taken, /* todo_flags_finish */
};

class pass_update_address_taken : public gimple_opt_pass
{
public:
  pass_update_address_taken (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_update_address_taken, ctxt)
  {}

  /* opt_pass methods: */

}; // class pass_update_address_taken

} // anon namespace

gimple_opt_pass *
make_pass_update_address_taken (gcc::context *ctxt)
{
  return new pass_update_address_taken (ctxt);
}