1e4b17023SJohn Marino /* SSA Jump Threading 2e4b17023SJohn Marino Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011 3e4b17023SJohn Marino Free Software Foundation, Inc. 4e4b17023SJohn Marino Contributed by Jeff Law <law@redhat.com> 5e4b17023SJohn Marino 6e4b17023SJohn Marino This file is part of GCC. 7e4b17023SJohn Marino 8e4b17023SJohn Marino GCC is free software; you can redistribute it and/or modify 9e4b17023SJohn Marino it under the terms of the GNU General Public License as published by 10e4b17023SJohn Marino the Free Software Foundation; either version 3, or (at your option) 11e4b17023SJohn Marino any later version. 12e4b17023SJohn Marino 13e4b17023SJohn Marino GCC is distributed in the hope that it will be useful, 14e4b17023SJohn Marino but WITHOUT ANY WARRANTY; without even the implied warranty of 15e4b17023SJohn Marino MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 16e4b17023SJohn Marino GNU General Public License for more details. 17e4b17023SJohn Marino 18e4b17023SJohn Marino You should have received a copy of the GNU General Public License 19e4b17023SJohn Marino along with GCC; see the file COPYING3. If not see 20e4b17023SJohn Marino <http://www.gnu.org/licenses/>. 
*/

/* Pass-independent SSA jump-threading engine: walks a candidate path,
   recording temporary equivalences, and asks a pass-specific callback
   to simplify the控制 condition.  (Shared by DOM and VRP.)  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "flags.h"
#include "tm_p.h"
#include "basic-block.h"
#include "cfgloop.h"
#include "output.h"
#include "function.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "langhooks.h"
#include "params.h"

/* To avoid code explosion due to jump threading, we limit the
   number of statements we are going to copy.  This variable
   holds the number of statements currently seen that we'll have
   to copy as part of the jump threading process.  */
static int stmt_count;

/* Array to record value-handles per SSA_NAME, indexed by
   SSA_NAME_VERSION.  Grown lazily by set_ssa_name_value.  */
VEC(tree,heap) *ssa_name_values;

/* Set the value for the SSA name NAME to VALUE.
   Grows SSA_NAME_VALUES on demand so the version number of NAME is
   always a valid index.  */

void
set_ssa_name_value (tree name, tree value)
{
  if (SSA_NAME_VERSION (name) >= VEC_length (tree, ssa_name_values))
    VEC_safe_grow_cleared (tree, heap, ssa_name_values,
			   SSA_NAME_VERSION (name) + 1);
  VEC_replace (tree, ssa_name_values, SSA_NAME_VERSION (name), value);
}

/* Initialize the per SSA_NAME value-handles array.  Returns it.  */
void
threadedge_initialize_values (void)
{
  /* Must not be called twice without an intervening finalize.  */
  gcc_assert (ssa_name_values == NULL);
  ssa_name_values = VEC_alloc(tree, heap, num_ssa_names);
}

/* Free the per SSA_NAME value-handle array.  */
void
threadedge_finalize_values (void)
{
  VEC_free(tree, heap, ssa_name_values);
}

/* Return TRUE if we may be able to thread an incoming edge into
   BB to an outgoing edge from BB.  Return FALSE otherwise.  */

bool
potentially_threadable_block (basic_block bb)
{
  gimple_stmt_iterator gsi;

  /* If BB has a single successor or a single predecessor, then
     there is no threading opportunity.  */
  if (single_succ_p (bb) || single_pred_p (bb))
    return false;

  /* If BB does not end with a conditional, switch or computed goto,
     then there is no threading opportunity.  */
  gsi = gsi_last_bb (bb);
  if (gsi_end_p (gsi)
      || ! gsi_stmt (gsi)
      || (gimple_code (gsi_stmt (gsi)) != GIMPLE_COND
	  && gimple_code (gsi_stmt (gsi)) != GIMPLE_GOTO
	  && gimple_code (gsi_stmt (gsi)) != GIMPLE_SWITCH))
    return false;

  return true;
}

/* Return the LHS of any ASSERT_EXPR where OP appears as the first
   argument to the ASSERT_EXPR and in which the ASSERT_EXPR dominates
   BB.  If no such ASSERT_EXPR is found, return OP.

   STMT is the statement consuming OP; it is excluded from the scan so
   a use in STMT itself does not count as a dominating assert.  */

static tree
lhs_of_dominating_assert (tree op, basic_block bb, gimple stmt)
{
  imm_use_iterator imm_iter;
  gimple use_stmt;
  use_operand_p use_p;

  /* Scan the immediate uses of OP looking for a dominating
     LHS = ASSERT_EXPR <OP, ...> copy.  */
  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
    {
      use_stmt = USE_STMT (use_p);
      if (use_stmt != stmt
          && gimple_assign_single_p (use_stmt)
          && TREE_CODE (gimple_assign_rhs1 (use_stmt)) == ASSERT_EXPR
          && TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) == op
	  && dominated_by_p (CDI_DOMINATORS, bb, gimple_bb (use_stmt)))
	{
	  return gimple_assign_lhs (use_stmt);
	}
    }
  return op;
}

/* We record temporary equivalences created by PHI nodes or
   statements within the target block.
   Doing so allows us to
   identify more jump threading opportunities, even in blocks
   with side effects.

   We keep track of those temporary equivalences in a stack
   structure so that we can unwind them when we're done processing
   a particular edge.  This routine handles unwinding the data
   structures.  */

static void
remove_temporary_equivalences (VEC(tree, heap) **stack)
{
  while (VEC_length (tree, *stack) > 0)
    {
      tree prev_value, dest;

      dest = VEC_pop (tree, *stack);

      /* A NULL value indicates we should stop unwinding, otherwise
	 pop off the next entry as they're recorded in pairs.  */
      if (dest == NULL)
	break;

      prev_value = VEC_pop (tree, *stack);
      set_ssa_name_value (dest, prev_value);
    }
}

/* Record a temporary equivalence X == Y, saving enough information
   (the previous value of X, pushed on STACK) so that we can restore
   the state of recorded equivalences when we're done processing the
   current edge.  */

static void
record_temporary_equivalence (tree x, tree y, VEC(tree, heap) **stack)
{
  tree prev_x = SSA_NAME_VALUE (x);

  /* Follow Y's own recorded value one step so chains collapse
     eagerly rather than at lookup time.  */
  if (TREE_CODE (y) == SSA_NAME)
    {
      tree tmp = SSA_NAME_VALUE (y);
      y = tmp ? tmp : y;
    }

  set_ssa_name_value (x, y);
  VEC_reserve (tree, heap, *stack, 2);
  VEC_quick_push (tree, *stack, prev_x);
  VEC_quick_push (tree, *stack, x);
}

/* Record temporary equivalences created by PHIs at the target of the
   edge E.  Record unwind information for the equivalences onto STACK.

   If a PHI which prevents threading is encountered, then return FALSE
   indicating we should not thread this edge, else return TRUE.  */

static bool
record_temporary_equivalences_from_phis (edge e, VEC(tree, heap) **stack)
{
  gimple_stmt_iterator gsi;

  /* Each PHI creates a temporary equivalence, record them.
     These are context sensitive equivalences and will be removed
     later.  */
  for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);
      tree src = PHI_ARG_DEF_FROM_EDGE (phi, e);
      tree dst = gimple_phi_result (phi);

      /* If the desired argument is not the same as this PHI's result
	 and it is set by a PHI in E->dest, then we can not thread
	 through E->dest.  */
      if (src != dst
	  && TREE_CODE (src) == SSA_NAME
	  && gimple_code (SSA_NAME_DEF_STMT (src)) == GIMPLE_PHI
	  && gimple_bb (SSA_NAME_DEF_STMT (src)) == e->dest)
	return false;

      /* We consider any non-virtual PHI as a statement since it
	 could result in a constant assignment or copy operation.  */
      if (is_gimple_reg (dst))
	stmt_count++;

      record_temporary_equivalence (dst, src, stack);
    }
  return true;
}

/* Fold the RHS of an assignment statement and return it as a tree.
   May return NULL_TREE if no simplification is possible.
*/ 219e4b17023SJohn Marino 220e4b17023SJohn Marino static tree 221e4b17023SJohn Marino fold_assignment_stmt (gimple stmt) 222e4b17023SJohn Marino { 223e4b17023SJohn Marino enum tree_code subcode = gimple_assign_rhs_code (stmt); 224e4b17023SJohn Marino 225e4b17023SJohn Marino switch (get_gimple_rhs_class (subcode)) 226e4b17023SJohn Marino { 227e4b17023SJohn Marino case GIMPLE_SINGLE_RHS: 228e4b17023SJohn Marino return fold (gimple_assign_rhs1 (stmt)); 229e4b17023SJohn Marino 230e4b17023SJohn Marino case GIMPLE_UNARY_RHS: 231e4b17023SJohn Marino { 232e4b17023SJohn Marino tree lhs = gimple_assign_lhs (stmt); 233e4b17023SJohn Marino tree op0 = gimple_assign_rhs1 (stmt); 234e4b17023SJohn Marino return fold_unary (subcode, TREE_TYPE (lhs), op0); 235e4b17023SJohn Marino } 236e4b17023SJohn Marino 237e4b17023SJohn Marino case GIMPLE_BINARY_RHS: 238e4b17023SJohn Marino { 239e4b17023SJohn Marino tree lhs = gimple_assign_lhs (stmt); 240e4b17023SJohn Marino tree op0 = gimple_assign_rhs1 (stmt); 241e4b17023SJohn Marino tree op1 = gimple_assign_rhs2 (stmt); 242e4b17023SJohn Marino return fold_binary (subcode, TREE_TYPE (lhs), op0, op1); 243e4b17023SJohn Marino } 244e4b17023SJohn Marino 245e4b17023SJohn Marino case GIMPLE_TERNARY_RHS: 246e4b17023SJohn Marino { 247e4b17023SJohn Marino tree lhs = gimple_assign_lhs (stmt); 248e4b17023SJohn Marino tree op0 = gimple_assign_rhs1 (stmt); 249e4b17023SJohn Marino tree op1 = gimple_assign_rhs2 (stmt); 250e4b17023SJohn Marino tree op2 = gimple_assign_rhs3 (stmt); 251e4b17023SJohn Marino 252e4b17023SJohn Marino /* Sadly, we have to handle conditional assignments specially 253e4b17023SJohn Marino here, because fold expects all the operands of an expression 254e4b17023SJohn Marino to be folded before the expression itself is folded, but we 255e4b17023SJohn Marino can't just substitute the folded condition here. 
*/ 256e4b17023SJohn Marino if (gimple_assign_rhs_code (stmt) == COND_EXPR) 257e4b17023SJohn Marino op0 = fold (op0); 258e4b17023SJohn Marino 259e4b17023SJohn Marino return fold_ternary (subcode, TREE_TYPE (lhs), op0, op1, op2); 260e4b17023SJohn Marino } 261e4b17023SJohn Marino 262e4b17023SJohn Marino default: 263e4b17023SJohn Marino gcc_unreachable (); 264e4b17023SJohn Marino } 265e4b17023SJohn Marino } 266e4b17023SJohn Marino 267e4b17023SJohn Marino /* Try to simplify each statement in E->dest, ultimately leading to 268e4b17023SJohn Marino a simplification of the COND_EXPR at the end of E->dest. 269e4b17023SJohn Marino 270e4b17023SJohn Marino Record unwind information for temporary equivalences onto STACK. 271e4b17023SJohn Marino 272e4b17023SJohn Marino Use SIMPLIFY (a pointer to a callback function) to further simplify 273e4b17023SJohn Marino statements using pass specific information. 274e4b17023SJohn Marino 275e4b17023SJohn Marino We might consider marking just those statements which ultimately 276e4b17023SJohn Marino feed the COND_EXPR. It's not clear if the overhead of bookkeeping 277e4b17023SJohn Marino would be recovered by trying to simplify fewer statements. 278e4b17023SJohn Marino 279e4b17023SJohn Marino If we are able to simplify a statement into the form 280e4b17023SJohn Marino SSA_NAME = (SSA_NAME | gimple invariant), then we can record 281e4b17023SJohn Marino a context sensitive equivalence which may help us simplify 282e4b17023SJohn Marino later statements in E->dest. 
*/

static gimple
record_temporary_equivalences_from_stmts_at_dest (edge e,
						  VEC(tree, heap) **stack,
						  tree (*simplify) (gimple,
								    gimple))
{
  gimple stmt = NULL;
  gimple_stmt_iterator gsi;
  int max_stmt_count;

  max_stmt_count = PARAM_VALUE (PARAM_MAX_JUMP_THREAD_DUPLICATION_STMTS);

  /* Walk through each statement in the block recording equivalences
     we discover.  Note any equivalences we discover are context
     sensitive (ie, are dependent on traversing E) and must be unwound
     when we're finished processing E.  */
  for (gsi = gsi_start_bb (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      tree cached_lhs = NULL;

      stmt = gsi_stmt (gsi);

      /* Ignore empty statements and labels.  */
      if (gimple_code (stmt) == GIMPLE_NOP
	  || gimple_code (stmt) == GIMPLE_LABEL
	  || is_gimple_debug (stmt))
	continue;

      /* If the statement has volatile operands, then we assume we
	 can not thread through this block.  This is overly
	 conservative in some ways.  */
      if (gimple_code (stmt) == GIMPLE_ASM && gimple_asm_volatile_p (stmt))
	return NULL;

      /* If duplicating this block is going to cause too much code
	 expansion, then do not thread through this block.  */
      stmt_count++;
      if (stmt_count > max_stmt_count)
	return NULL;

      /* If this is not a statement that sets an SSA_NAME to a new
	 value, then do not try to simplify this statement as it will
	 not simplify in any way that is helpful for jump threading.  */
      if ((gimple_code (stmt) != GIMPLE_ASSIGN
	   || TREE_CODE (gimple_assign_lhs (stmt)) != SSA_NAME)
	  && (gimple_code (stmt) != GIMPLE_CALL
	      || gimple_call_lhs (stmt) == NULL_TREE
	      || TREE_CODE (gimple_call_lhs (stmt)) != SSA_NAME))
	continue;

      /* The result of __builtin_object_size depends on all the arguments
	 of a phi node.  Temporarily using only one edge produces invalid
	 results.  For example

	 if (x < 6)
	   goto l;
	 else
	   goto l;

	 l:
	 r = PHI <&w[2].a[1](2), &a.a[6](3)>
	 __builtin_object_size (r, 0)

	 The result of __builtin_object_size is defined to be the maximum of
	 remaining bytes.  If we use only one edge on the phi, the result will
	 change to be the remaining bytes for the corresponding phi argument.

	 Similarly for __builtin_constant_p:

	 r = PHI <1(2), 2(3)>
	 __builtin_constant_p (r)

	 Both PHI arguments are constant, but x ? 1 : 2 is still not
	 constant.  */

      if (is_gimple_call (stmt))
	{
	  /* NOTE(review): DECL_FUNCTION_CODE is consulted without first
	     checking DECL_BUILT_IN (fndecl); this appears to rely on
	     non-builtins having a function code that matches neither
	     BUILT_IN_OBJECT_SIZE nor BUILT_IN_CONSTANT_P — confirm.  */
	  tree fndecl = gimple_call_fndecl (stmt);
	  if (fndecl
	      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_OBJECT_SIZE
		  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CONSTANT_P))
	    continue;
	}

      /* At this point we have a statement which assigns an RHS to an
	 SSA_VAR on the LHS.  We want to try and simplify this statement
	 to expose more context sensitive equivalences which in turn may
	 allow us to simplify the condition at the end of the loop.

	 Handle simple copy operations as well as implied copies from
	 ASSERT_EXPRs.  */
      if (gimple_assign_single_p (stmt)
	  && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
	cached_lhs = gimple_assign_rhs1 (stmt);
      else if (gimple_assign_single_p (stmt)
	       && TREE_CODE (gimple_assign_rhs1 (stmt)) == ASSERT_EXPR)
	cached_lhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      else
	{
	  /* A statement that is not a trivial copy or ASSERT_EXPR.
	     We're going to temporarily copy propagate the operands
	     and see if that allows us to simplify this statement.  */
	  tree *copy;
	  ssa_op_iter iter;
	  use_operand_p use_p;
	  unsigned int num, i = 0;

	  num = NUM_SSA_OPERANDS (stmt, (SSA_OP_USE | SSA_OP_VUSE));
	  copy = XCNEWVEC (tree, num);

	  /* Make a copy of the uses & vuses into USES_COPY, then cprop into
	     the operands.  */
	  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
	    {
	      tree tmp = NULL;
	      tree use = USE_FROM_PTR (use_p);

	      copy[i++] = use;
	      if (TREE_CODE (use) == SSA_NAME)
		tmp = SSA_NAME_VALUE (use);
	      if (tmp)
		SET_USE (use_p, tmp);
	    }

	  /* Try to fold/lookup the new expression.  Inserting the
	     expression into the hash table is unlikely to help.  */
	  if (is_gimple_call (stmt))
	    cached_lhs = fold_call_stmt (stmt, false);
	  else
	    cached_lhs = fold_assignment_stmt (stmt);

	  /* Fall back to the pass-specific callback when folding did
	     not yield an SSA name or a gimple invariant.  */
	  if (!cached_lhs
	      || (TREE_CODE (cached_lhs) != SSA_NAME
		  && !is_gimple_min_invariant (cached_lhs)))
	    cached_lhs = (*simplify) (stmt, stmt);

	  /* Restore the statement's original uses/defs.  */
	  i = 0;
	  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE | SSA_OP_VUSE)
	    SET_USE (use_p, copy[i++]);

	  free (copy);
	}

      /* Record the context sensitive equivalence if we were able
	 to simplify this statement.  */
      if (cached_lhs
	  && (TREE_CODE (cached_lhs) == SSA_NAME
	      || is_gimple_min_invariant (cached_lhs)))
	record_temporary_equivalence (gimple_get_lhs (stmt), cached_lhs, stack);
    }
  return stmt;
}

/* Simplify the control statement at the end of the block E->dest.

   To avoid allocating memory unnecessarily, a scratch GIMPLE_COND
   is available to use/clobber in DUMMY_COND.

   Use SIMPLIFY (a pointer to a callback function) to further simplify
   a condition using pass specific information.

   Return the simplified condition or NULL if simplification could
   not be performed.
*/

static tree
simplify_control_stmt_condition (edge e,
				 gimple stmt,
				 gimple dummy_cond,
				 tree (*simplify) (gimple, gimple),
				 bool handle_dominating_asserts)
{
  tree cond, cached_lhs;
  enum gimple_code code = gimple_code (stmt);

  /* For comparisons, we have to update both operands, then try
     to simplify the comparison.  */
  if (code == GIMPLE_COND)
    {
      tree op0, op1;
      enum tree_code cond_code;

      op0 = gimple_cond_lhs (stmt);
      op1 = gimple_cond_rhs (stmt);
      cond_code = gimple_cond_code (stmt);

      /* Get the current value of both operands.  */
      if (TREE_CODE (op0) == SSA_NAME)
	{
	  tree tmp = SSA_NAME_VALUE (op0);
	  if (tmp)
	    op0 = tmp;
	}

      if (TREE_CODE (op1) == SSA_NAME)
	{
	  tree tmp = SSA_NAME_VALUE (op1);
	  if (tmp)
	    op1 = tmp;
	}

      if (handle_dominating_asserts)
	{
	  /* Now see if the operand was consumed by an ASSERT_EXPR
	     which dominates E->src.  If so, we want to replace the
	     operand with the LHS of the ASSERT_EXPR.  */
	  if (TREE_CODE (op0) == SSA_NAME)
	    op0 = lhs_of_dominating_assert (op0, e->src, stmt);

	  if (TREE_CODE (op1) == SSA_NAME)
	    op1 = lhs_of_dominating_assert (op1, e->src, stmt);
	}

      /* We may need to canonicalize the comparison.  For
	 example, op0 might be a constant while op1 is an
	 SSA_NAME.  Failure to canonicalize will cause us to
	 miss threading opportunities.  */
      if (tree_swap_operands_p (op0, op1, false))
	{
	  tree tmp;
	  cond_code = swap_tree_comparison (cond_code);
	  tmp = op0;
	  op0 = op1;
	  op1 = tmp;
	}

      /* Stuff the operator and operands into our dummy conditional
	 expression.  */
      gimple_cond_set_code (dummy_cond, cond_code);
      gimple_cond_set_lhs (dummy_cond, op0);
      gimple_cond_set_rhs (dummy_cond, op1);

      /* We absolutely do not care about any type conversions
	 we only care about a zero/nonzero value.  */
      fold_defer_overflow_warnings ();

      cached_lhs = fold_binary (cond_code, boolean_type_node, op0, op1);
      if (cached_lhs)
	while (CONVERT_EXPR_P (cached_lhs))
	  cached_lhs = TREE_OPERAND (cached_lhs, 0);

      /* Only emit deferred overflow warnings when the fold produced a
	 usable invariant result.  */
      fold_undefer_overflow_warnings ((cached_lhs
				       && is_gimple_min_invariant (cached_lhs)),
				      stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);

      /* If we have not simplified the condition down to an invariant,
	 then use the pass specific callback to simplify the condition.  */
      if (!cached_lhs
	  || !is_gimple_min_invariant (cached_lhs))
	cached_lhs = (*simplify) (dummy_cond, stmt);

      return cached_lhs;
    }

  if (code == GIMPLE_SWITCH)
    cond = gimple_switch_index (stmt);
  else if (code == GIMPLE_GOTO)
    cond = gimple_goto_dest (stmt);
  else
    gcc_unreachable ();

  /* We can have conditionals which just test the state of a variable
     rather than use a relational operator.  These are simpler to handle.  */
  if (TREE_CODE (cond) == SSA_NAME)
    {
      cached_lhs = cond;

      /* Get the variable's current value from the equivalence chains.

	 It is possible to get loops in the SSA_NAME_VALUE chains
	 (consider threading the backedge of a loop where we have
	 a loop invariant SSA_NAME used in the condition), so only
	 one step of the chain is followed here.  */
      if (cached_lhs
	  && TREE_CODE (cached_lhs) == SSA_NAME
	  && SSA_NAME_VALUE (cached_lhs))
	cached_lhs = SSA_NAME_VALUE (cached_lhs);

      /* If we're dominated by a suitable ASSERT_EXPR, then
	 update CACHED_LHS appropriately.  */
      if (handle_dominating_asserts && TREE_CODE (cached_lhs) == SSA_NAME)
	cached_lhs = lhs_of_dominating_assert (cached_lhs, e->src, stmt);

      /* If we haven't simplified to an invariant yet, then use the
	 pass specific callback to try and simplify it further.  */
      if (cached_lhs && ! is_gimple_min_invariant (cached_lhs))
	cached_lhs = (*simplify) (stmt, stmt);
    }
  else
    cached_lhs = NULL;

  return cached_lhs;
}

/* Return TRUE if the statement at the end of e->dest depends on
   the output of any statement in BB.   Otherwise return FALSE.

   This is used when we are threading a backedge and need to ensure
   that temporary equivalences from BB do not affect the condition
   in e->dest.
*/ 583*5ce9237cSJohn Marino 584*5ce9237cSJohn Marino static bool 585*5ce9237cSJohn Marino cond_arg_set_in_bb (edge e, basic_block bb) 586*5ce9237cSJohn Marino { 587*5ce9237cSJohn Marino ssa_op_iter iter; 588*5ce9237cSJohn Marino use_operand_p use_p; 589*5ce9237cSJohn Marino gimple last = last_stmt (e->dest); 590*5ce9237cSJohn Marino 591*5ce9237cSJohn Marino /* E->dest does not have to end with a control transferring 592*5ce9237cSJohn Marino instruction. This can occurr when we try to extend a jump 593*5ce9237cSJohn Marino threading opportunity deeper into the CFG. In that case 594*5ce9237cSJohn Marino it is safe for this check to return false. */ 595*5ce9237cSJohn Marino if (!last) 596*5ce9237cSJohn Marino return false; 597*5ce9237cSJohn Marino 598*5ce9237cSJohn Marino if (gimple_code (last) != GIMPLE_COND 599*5ce9237cSJohn Marino && gimple_code (last) != GIMPLE_GOTO 600*5ce9237cSJohn Marino && gimple_code (last) != GIMPLE_SWITCH) 601*5ce9237cSJohn Marino return false; 602*5ce9237cSJohn Marino 603*5ce9237cSJohn Marino FOR_EACH_SSA_USE_OPERAND (use_p, last, iter, SSA_OP_USE | SSA_OP_VUSE) 604*5ce9237cSJohn Marino { 605*5ce9237cSJohn Marino tree use = USE_FROM_PTR (use_p); 606*5ce9237cSJohn Marino 607*5ce9237cSJohn Marino if (TREE_CODE (use) == SSA_NAME 608*5ce9237cSJohn Marino && gimple_code (SSA_NAME_DEF_STMT (use)) != GIMPLE_PHI 609*5ce9237cSJohn Marino && gimple_bb (SSA_NAME_DEF_STMT (use)) == bb) 610*5ce9237cSJohn Marino return true; 611*5ce9237cSJohn Marino } 612*5ce9237cSJohn Marino return false; 613*5ce9237cSJohn Marino } 614*5ce9237cSJohn Marino 615e4b17023SJohn Marino /* TAKEN_EDGE represents the an edge taken as a result of jump threading. 616e4b17023SJohn Marino See if we can thread around TAKEN_EDGE->dest as well. If so, return 617e4b17023SJohn Marino the edge out of TAKEN_EDGE->dest that we can statically compute will be 618e4b17023SJohn Marino traversed. 
619e4b17023SJohn Marino 620e4b17023SJohn Marino We are much more restrictive as to the contents of TAKEN_EDGE->dest 621e4b17023SJohn Marino as the path isolation code in tree-ssa-threadupdate.c isn't prepared 622e4b17023SJohn Marino to handle copying intermediate blocks on a threaded path. 623e4b17023SJohn Marino 624e4b17023SJohn Marino Long term a more consistent and structured approach to path isolation 625e4b17023SJohn Marino would be a huge help. */ 626e4b17023SJohn Marino static edge 627e4b17023SJohn Marino thread_around_empty_block (edge taken_edge, 628e4b17023SJohn Marino gimple dummy_cond, 629e4b17023SJohn Marino bool handle_dominating_asserts, 630e4b17023SJohn Marino tree (*simplify) (gimple, gimple), 631e4b17023SJohn Marino bitmap visited) 632e4b17023SJohn Marino { 633e4b17023SJohn Marino basic_block bb = taken_edge->dest; 634e4b17023SJohn Marino gimple_stmt_iterator gsi; 635e4b17023SJohn Marino gimple stmt; 636e4b17023SJohn Marino tree cond; 637e4b17023SJohn Marino 638e4b17023SJohn Marino /* This block must have a single predecessor (E->dest). */ 639e4b17023SJohn Marino if (!single_pred_p (bb)) 640e4b17023SJohn Marino return NULL; 641e4b17023SJohn Marino 642e4b17023SJohn Marino /* This block must have more than one successor. */ 643e4b17023SJohn Marino if (single_succ_p (bb)) 644e4b17023SJohn Marino return NULL; 645e4b17023SJohn Marino 646e4b17023SJohn Marino /* This block can have no PHI nodes. This is overly conservative. */ 647e4b17023SJohn Marino if (!gsi_end_p (gsi_start_phis (bb))) 648e4b17023SJohn Marino return NULL; 649e4b17023SJohn Marino 650e4b17023SJohn Marino /* Skip over DEBUG statements at the start of the block. */ 651e4b17023SJohn Marino gsi = gsi_start_nondebug_bb (bb); 652e4b17023SJohn Marino 653e4b17023SJohn Marino if (gsi_end_p (gsi)) 654e4b17023SJohn Marino return NULL; 655e4b17023SJohn Marino 656e4b17023SJohn Marino /* This block can have no statements other than its control altering 657e4b17023SJohn Marino statement. 
This is overly conservative. */ 658e4b17023SJohn Marino stmt = gsi_stmt (gsi); 659e4b17023SJohn Marino if (gimple_code (stmt) != GIMPLE_COND 660e4b17023SJohn Marino && gimple_code (stmt) != GIMPLE_GOTO 661e4b17023SJohn Marino && gimple_code (stmt) != GIMPLE_SWITCH) 662e4b17023SJohn Marino return NULL; 663e4b17023SJohn Marino 664e4b17023SJohn Marino /* Extract and simplify the condition. */ 665e4b17023SJohn Marino cond = simplify_control_stmt_condition (taken_edge, stmt, dummy_cond, 666e4b17023SJohn Marino simplify, handle_dominating_asserts); 667e4b17023SJohn Marino 668e4b17023SJohn Marino /* If the condition can be statically computed and we have not already 669e4b17023SJohn Marino visited the destination edge, then add the taken edge to our thread 670e4b17023SJohn Marino path. */ 671e4b17023SJohn Marino if (cond && is_gimple_min_invariant (cond)) 672e4b17023SJohn Marino { 673e4b17023SJohn Marino edge taken_edge = find_taken_edge (bb, cond); 674e4b17023SJohn Marino 675e4b17023SJohn Marino if (bitmap_bit_p (visited, taken_edge->dest->index)) 676e4b17023SJohn Marino return NULL; 677e4b17023SJohn Marino bitmap_set_bit (visited, taken_edge->dest->index); 678e4b17023SJohn Marino return taken_edge; 679e4b17023SJohn Marino } 680e4b17023SJohn Marino 681e4b17023SJohn Marino return NULL; 682e4b17023SJohn Marino } 683e4b17023SJohn Marino 684e4b17023SJohn Marino /* E1 and E2 are edges into the same basic block. Return TRUE if the 685e4b17023SJohn Marino PHI arguments associated with those edges are equal or there are no 686e4b17023SJohn Marino PHI arguments, otherwise return FALSE. 
*/ 687e4b17023SJohn Marino 688e4b17023SJohn Marino static bool 689e4b17023SJohn Marino phi_args_equal_on_edges (edge e1, edge e2) 690e4b17023SJohn Marino { 691e4b17023SJohn Marino gimple_stmt_iterator gsi; 692e4b17023SJohn Marino int indx1 = e1->dest_idx; 693e4b17023SJohn Marino int indx2 = e2->dest_idx; 694e4b17023SJohn Marino 695e4b17023SJohn Marino for (gsi = gsi_start_phis (e1->dest); !gsi_end_p (gsi); gsi_next (&gsi)) 696e4b17023SJohn Marino { 697e4b17023SJohn Marino gimple phi = gsi_stmt (gsi); 698e4b17023SJohn Marino 699e4b17023SJohn Marino if (!operand_equal_p (gimple_phi_arg_def (phi, indx1), 700e4b17023SJohn Marino gimple_phi_arg_def (phi, indx2), 0)) 701e4b17023SJohn Marino return false; 702e4b17023SJohn Marino } 703e4b17023SJohn Marino return true; 704e4b17023SJohn Marino } 705e4b17023SJohn Marino 706e4b17023SJohn Marino /* We are exiting E->src, see if E->dest ends with a conditional 707e4b17023SJohn Marino jump which has a known value when reached via E. 708e4b17023SJohn Marino 709e4b17023SJohn Marino Special care is necessary if E is a back edge in the CFG as we 710e4b17023SJohn Marino may have already recorded equivalences for E->dest into our 711e4b17023SJohn Marino various tables, including the result of the conditional at 712e4b17023SJohn Marino the end of E->dest. Threading opportunities are severely 713e4b17023SJohn Marino limited in that case to avoid short-circuiting the loop 714e4b17023SJohn Marino incorrectly. 715e4b17023SJohn Marino 716e4b17023SJohn Marino Note it is quite common for the first block inside a loop to 717e4b17023SJohn Marino end with a conditional which is either always true or always 718e4b17023SJohn Marino false when reached via the loop backedge. Thus we do not want 719e4b17023SJohn Marino to blindly disable threading across a loop backedge. 720e4b17023SJohn Marino 721e4b17023SJohn Marino DUMMY_COND is a shared cond_expr used by condition simplification as scratch, 722e4b17023SJohn Marino to avoid allocating memory. 
   HANDLE_DOMINATING_ASSERTS is true if we should try to replace operands of
   the simplified condition with left-hand sides of ASSERT_EXPRs they are
   used in.

   STACK is used to undo temporary equivalences created during the walk of
   E->dest.

   SIMPLIFY is a pass-specific function used to simplify statements.  */

void
thread_across_edge (gimple dummy_cond,
		    edge e,
		    bool handle_dominating_asserts,
		    VEC(tree, heap) **stack,
		    tree (*simplify) (gimple, gimple))
{
  gimple stmt;

  /* If E is a backedge, then we want to verify that the COND_EXPR,
     SWITCH_EXPR or GOTO_EXPR at the end of e->dest is not affected
     by any statements in e->dest.  If it is affected, then it is not
     safe to thread this edge.  */
  if (e->flags & EDGE_DFS_BACK)
    {
      if (cond_arg_set_in_bb (e, e->dest))
	goto fail;
    }

  /* Reset the statement-copy budget for this threading attempt.  */
  stmt_count = 0;

  /* PHIs create temporary equivalences.  A failure here means the
     PHI walk found the edge unsuitable for threading.  */
  if (!record_temporary_equivalences_from_phis (e, stack))
    goto fail;

  /* Now walk each statement recording any context sensitive
     temporary equivalences we can detect.  Returns the control
     statement ending E->dest, or NULL if threading must be
     abandoned (e.g. too many statements to copy).  */
  stmt = record_temporary_equivalences_from_stmts_at_dest (e, stack, simplify);
  if (!stmt)
    goto fail;

  /* If we stopped at a COND_EXPR or SWITCH_EXPR, see if we know which arm
     will be taken.  */
  if (gimple_code (stmt) == GIMPLE_COND
      || gimple_code (stmt) == GIMPLE_GOTO
      || gimple_code (stmt) == GIMPLE_SWITCH)
    {
      tree cond;

      /* Extract and simplify the condition.  */
      cond = simplify_control_stmt_condition (e, stmt, dummy_cond, simplify,
					      handle_dominating_asserts);

      if (cond && is_gimple_min_invariant (cond))
	{
	  edge taken_edge = find_taken_edge (e->dest, cond);
	  basic_block dest = (taken_edge ? taken_edge->dest : NULL);
	  bitmap visited;
	  edge e2;

	  /* Threading back into E->dest itself would be a no-op.  */
	  if (dest == e->dest)
	    goto fail;

	  /* DEST could be null for a computed jump to an absolute
	     address.  If DEST is not null, then see if we can thread
	     through it as well, this helps capture secondary effects
	     of threading without having to re-run DOM or VRP.
	     For a backedge, only extend the path when the next block's
	     condition is unaffected by statements in E->dest.  */
	  if (dest
	      && ((e->flags & EDGE_DFS_BACK) == 0
		  || ! cond_arg_set_in_bb (taken_edge, e->dest)))
	    {
	      /* We don't want to thread back to a block we have already
		 visited.  This may be overly conservative.  */
	      visited = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (visited, dest->index);
	      bitmap_set_bit (visited, e->dest->index);
	      /* Repeatedly extend the thread path through empty blocks
		 until no further extension is possible.  */
	      do
		{
		  e2 = thread_around_empty_block (taken_edge,
						  dummy_cond,
						  handle_dominating_asserts,
						  simplify,
						  visited);
		  if (e2)
		    taken_edge = e2;
		}
	      while (e2);
	      BITMAP_FREE (visited);
	    }

	  /* Undo the temporary equivalences before recording the
	     thread request; they must not leak to other edges.  */
	  remove_temporary_equivalences (stack);
	  register_jump_thread (e, taken_edge, NULL);
	  return;
	}
    }

 /* We were unable to determine what out edge from E->dest is taken.  However,
    we might still be able to thread through successors of E->dest.  This
    often occurs when E->dest is a joiner block which then fans back out
    based on redundant tests.

    If so, we'll copy E->dest and redirect the appropriate predecessor to
    the copy.  Within the copy of E->dest, we'll thread one or more edges
    to points deeper in the CFG.

    This is a stopgap until we have a more structured approach to path
    isolation.  */
  {
    edge e2, e3, taken_edge;
    edge_iterator ei;
    bool found = false;
    bitmap visited = BITMAP_ALLOC (NULL);

    /* Look at each successor of E->dest to see if we can thread through it.  */
    FOR_EACH_EDGE (taken_edge, ei, e->dest->succs)
      {
	/* Avoid threading to any block we have already visited.  */
	bitmap_clear (visited);
	bitmap_set_bit (visited, taken_edge->dest->index);
	bitmap_set_bit (visited, e->dest->index);

	/* Record whether or not we were able to thread through a successor
	   of E->dest.  */
	found = false;
	e3 = taken_edge;
	do
	  {
	    /* For a backedge, only continue when the candidate block's
	       condition does not depend on statements in E->dest.  */
	    if ((e->flags & EDGE_DFS_BACK) == 0
		|| ! cond_arg_set_in_bb (e3, e->dest))
	      e2 = thread_around_empty_block (e3,
					      dummy_cond,
					      handle_dominating_asserts,
					      simplify,
					      visited);
	    else
	      e2 = NULL;

	    if (e2)
	      {
		e3 = e2;
		found = true;
	      }
	  }
	while (e2);

	/* If we were able to thread through a successor of E->dest, then
	   record the jump threading opportunity.  */
	if (found)
	  {
	    edge tmp;
	    /* If there is already an edge from the block to be duplicated
	       (E2->src) to the final target (E3->dest), then make sure that
	       the PHI args associated with the edges E2 and E3 are the
	       same.  */
	    tmp = find_edge (taken_edge->src, e3->dest);
	    if (!tmp || phi_args_equal_on_edges (tmp, e3))
	      register_jump_thread (e, taken_edge, e3);
	  }

      }
    BITMAP_FREE (visited);
  }

 fail:
  /* On any failure path, undo the temporary equivalences recorded
     for E->dest so they cannot affect other edges.  */
  remove_temporary_equivalences (stack);
}