/* Backward propagation of indirect loads through PHIs.
   Copyright (C) 2007-2020 Free Software Foundation, Inc.
   Contributed by Richard Guenther <rguenther@suse.de>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "stor-layout.h"
#include "tree-ssa-loop.h"

/* This pass propagates indirect loads through the PHI node for its
   address to make the load source possibly non-addressable and to
   allow for PHI optimization to trigger.

   For example the pass changes

     # addr_1 = PHI <&a, &b>
     tmp_1 = *addr_1;

   to

     # tmp_1 = PHI <a, b>

   but also handles more complex scenarios like

     D.2077_2 = &this_1(D)->a1;
     ...

     # b_12 = PHI <&c(2), D.2077_2(3)>
     D.2114_13 = *b_12;
     ...

     # b_15 = PHI <b_12(4), &b(5)>
     D.2080_5 = &this_1(D)->a0;
     ...

     # b_18 = PHI <D.2080_5(6), &c(7)>
     ...

     # b_21 = PHI <b_15(8), b_18(9)>
     D.2076_8 = *b_21;

   where the addresses loaded are defined by PHIs itself.
   The above happens for

     std::max(std::min(a0, c), std::min(std::max(a1, c), b))

   where this pass transforms it to a form later PHI optimization
   recognizes and transforms it to the simple

     D.2109_10 = this_1(D)->a1;
     D.2110_11 = c;
     D.2114_31 = MAX_EXPR <D.2109_10, D.2110_11>;
     D.2115_14 = b;
     D.2125_17 = MIN_EXPR <D.2115_14, D.2114_31>;
     D.2119_16 = this_1(D)->a0;
     D.2124_32 = MIN_EXPR <D.2110_11, D.2119_16>;
     D.2076_33 = MAX_EXPR <D.2125_17, D.2124_32>;

   The pass does a dominator walk processing loads using a basic-block
   local analysis and stores the result for use by transformations on
   dominated basic-blocks.  */

911debfc3dSmrg /* Structure to keep track of the value of a dereferenced PHI result
921debfc3dSmrg and the virtual operand used for that dereference. */
931debfc3dSmrg
941debfc3dSmrg struct phiprop_d
951debfc3dSmrg {
961debfc3dSmrg tree value;
971debfc3dSmrg tree vuse;
981debfc3dSmrg };
991debfc3dSmrg
1001debfc3dSmrg /* Verify if the value recorded for NAME in PHIVN is still valid at
1011debfc3dSmrg the start of basic block BB. */
1021debfc3dSmrg
1031debfc3dSmrg static bool
phivn_valid_p(struct phiprop_d * phivn,tree name,basic_block bb)1041debfc3dSmrg phivn_valid_p (struct phiprop_d *phivn, tree name, basic_block bb)
1051debfc3dSmrg {
1061debfc3dSmrg tree vuse = phivn[SSA_NAME_VERSION (name)].vuse;
1071debfc3dSmrg gimple *use_stmt;
1081debfc3dSmrg imm_use_iterator ui2;
1091debfc3dSmrg bool ok = true;
1101debfc3dSmrg
1111debfc3dSmrg /* The def stmts of the virtual uses need to be dominated by bb. */
1121debfc3dSmrg gcc_assert (vuse != NULL_TREE);
1131debfc3dSmrg
1141debfc3dSmrg FOR_EACH_IMM_USE_STMT (use_stmt, ui2, vuse)
1151debfc3dSmrg {
1161debfc3dSmrg /* If BB does not dominate a VDEF, the value is invalid. */
1171debfc3dSmrg if ((gimple_vdef (use_stmt) != NULL_TREE
1181debfc3dSmrg || gimple_code (use_stmt) == GIMPLE_PHI)
1191debfc3dSmrg && !dominated_by_p (CDI_DOMINATORS, gimple_bb (use_stmt), bb))
1201debfc3dSmrg {
1211debfc3dSmrg ok = false;
1221debfc3dSmrg BREAK_FROM_IMM_USE_STMT (ui2);
1231debfc3dSmrg }
1241debfc3dSmrg }
1251debfc3dSmrg
1261debfc3dSmrg return ok;
1271debfc3dSmrg }
1281debfc3dSmrg
1291debfc3dSmrg /* Insert a new phi node for the dereference of PHI at basic_block
1301debfc3dSmrg BB with the virtual operands from USE_STMT. */
1311debfc3dSmrg
1321debfc3dSmrg static tree
phiprop_insert_phi(basic_block bb,gphi * phi,gimple * use_stmt,struct phiprop_d * phivn,size_t n)1331debfc3dSmrg phiprop_insert_phi (basic_block bb, gphi *phi, gimple *use_stmt,
1341debfc3dSmrg struct phiprop_d *phivn, size_t n)
1351debfc3dSmrg {
1361debfc3dSmrg tree res;
1371debfc3dSmrg gphi *new_phi = NULL;
1381debfc3dSmrg edge_iterator ei;
1391debfc3dSmrg edge e;
1401debfc3dSmrg
1411debfc3dSmrg gcc_assert (is_gimple_assign (use_stmt)
1421debfc3dSmrg && gimple_assign_rhs_code (use_stmt) == MEM_REF);
1431debfc3dSmrg
1441debfc3dSmrg /* Build a new PHI node to replace the definition of
1451debfc3dSmrg the indirect reference lhs. */
1461debfc3dSmrg res = gimple_assign_lhs (use_stmt);
1471debfc3dSmrg if (TREE_CODE (res) == SSA_NAME)
1481debfc3dSmrg new_phi = create_phi_node (res, bb);
1491debfc3dSmrg
1501debfc3dSmrg if (dump_file && (dump_flags & TDF_DETAILS))
1511debfc3dSmrg {
1521debfc3dSmrg fprintf (dump_file, "Inserting PHI for result of load ");
153a2dc1f3fSmrg print_gimple_stmt (dump_file, use_stmt, 0);
1541debfc3dSmrg }
1551debfc3dSmrg
1561debfc3dSmrg /* Add PHI arguments for each edge inserting loads of the
1571debfc3dSmrg addressable operands. */
1581debfc3dSmrg FOR_EACH_EDGE (e, ei, bb->preds)
1591debfc3dSmrg {
1601debfc3dSmrg tree old_arg, new_var;
1611debfc3dSmrg gassign *tmp;
162c0a68be4Smrg location_t locus;
1631debfc3dSmrg
1641debfc3dSmrg old_arg = PHI_ARG_DEF_FROM_EDGE (phi, e);
1651debfc3dSmrg locus = gimple_phi_arg_location_from_edge (phi, e);
1661debfc3dSmrg while (TREE_CODE (old_arg) == SSA_NAME
1671debfc3dSmrg && (SSA_NAME_VERSION (old_arg) >= n
1681debfc3dSmrg || phivn[SSA_NAME_VERSION (old_arg)].value == NULL_TREE))
1691debfc3dSmrg {
1701debfc3dSmrg gimple *def_stmt = SSA_NAME_DEF_STMT (old_arg);
1711debfc3dSmrg old_arg = gimple_assign_rhs1 (def_stmt);
1721debfc3dSmrg locus = gimple_location (def_stmt);
1731debfc3dSmrg }
1741debfc3dSmrg
1751debfc3dSmrg if (TREE_CODE (old_arg) == SSA_NAME)
1761debfc3dSmrg {
1771debfc3dSmrg if (dump_file && (dump_flags & TDF_DETAILS))
1781debfc3dSmrg {
1791debfc3dSmrg fprintf (dump_file, " for edge defining ");
180a2dc1f3fSmrg print_generic_expr (dump_file, PHI_ARG_DEF_FROM_EDGE (phi, e));
1811debfc3dSmrg fprintf (dump_file, " reusing PHI result ");
1821debfc3dSmrg print_generic_expr (dump_file,
183a2dc1f3fSmrg phivn[SSA_NAME_VERSION (old_arg)].value);
1841debfc3dSmrg fprintf (dump_file, "\n");
1851debfc3dSmrg }
1861debfc3dSmrg /* Reuse a formerly created dereference. */
1871debfc3dSmrg new_var = phivn[SSA_NAME_VERSION (old_arg)].value;
1881debfc3dSmrg }
1891debfc3dSmrg else
1901debfc3dSmrg {
1911debfc3dSmrg tree rhs = gimple_assign_rhs1 (use_stmt);
1921debfc3dSmrg gcc_assert (TREE_CODE (old_arg) == ADDR_EXPR);
1931debfc3dSmrg if (TREE_CODE (res) == SSA_NAME)
1941debfc3dSmrg new_var = make_ssa_name (TREE_TYPE (rhs));
1951debfc3dSmrg else
1961debfc3dSmrg new_var = unshare_expr (res);
1971debfc3dSmrg if (!is_gimple_min_invariant (old_arg))
1981debfc3dSmrg old_arg = PHI_ARG_DEF_FROM_EDGE (phi, e);
1991debfc3dSmrg else
2001debfc3dSmrg old_arg = unshare_expr (old_arg);
2011debfc3dSmrg tmp = gimple_build_assign (new_var,
2021debfc3dSmrg fold_build2 (MEM_REF, TREE_TYPE (rhs),
2031debfc3dSmrg old_arg,
2041debfc3dSmrg TREE_OPERAND (rhs, 1)));
2051debfc3dSmrg gimple_set_location (tmp, locus);
2061debfc3dSmrg
2071debfc3dSmrg gsi_insert_on_edge (e, tmp);
2081debfc3dSmrg update_stmt (tmp);
2091debfc3dSmrg
2101debfc3dSmrg if (dump_file && (dump_flags & TDF_DETAILS))
2111debfc3dSmrg {
2121debfc3dSmrg fprintf (dump_file, " for edge defining ");
213a2dc1f3fSmrg print_generic_expr (dump_file, PHI_ARG_DEF_FROM_EDGE (phi, e));
2141debfc3dSmrg fprintf (dump_file, " inserting load ");
215a2dc1f3fSmrg print_gimple_stmt (dump_file, tmp, 0);
2161debfc3dSmrg }
2171debfc3dSmrg }
2181debfc3dSmrg
2191debfc3dSmrg if (new_phi)
2201debfc3dSmrg add_phi_arg (new_phi, new_var, e, locus);
2211debfc3dSmrg }
2221debfc3dSmrg
2231debfc3dSmrg if (new_phi)
2241debfc3dSmrg {
2251debfc3dSmrg update_stmt (new_phi);
2261debfc3dSmrg
2271debfc3dSmrg if (dump_file && (dump_flags & TDF_DETAILS))
228a2dc1f3fSmrg print_gimple_stmt (dump_file, new_phi, 0);
2291debfc3dSmrg }
2301debfc3dSmrg
2311debfc3dSmrg return res;
2321debfc3dSmrg }
2331debfc3dSmrg
2341debfc3dSmrg /* Verify if *idx is available at *DATA. */
2351debfc3dSmrg
2361debfc3dSmrg static bool
chk_uses(tree,tree * idx,void * data)2371debfc3dSmrg chk_uses (tree, tree *idx, void *data)
2381debfc3dSmrg {
2391debfc3dSmrg basic_block dom = (basic_block) data;
2401debfc3dSmrg if (TREE_CODE (*idx) == SSA_NAME)
2411debfc3dSmrg return (SSA_NAME_IS_DEFAULT_DEF (*idx)
2421debfc3dSmrg || ! dominated_by_p (CDI_DOMINATORS,
2431debfc3dSmrg gimple_bb (SSA_NAME_DEF_STMT (*idx)), dom));
2441debfc3dSmrg return true;
2451debfc3dSmrg }
2461debfc3dSmrg
2471debfc3dSmrg /* Propagate between the phi node arguments of PHI in BB and phi result
2481debfc3dSmrg users. For now this matches
2491debfc3dSmrg # p_2 = PHI <&x, &y>
2501debfc3dSmrg <Lx>:;
2511debfc3dSmrg p_3 = p_2;
2521debfc3dSmrg z_2 = *p_3;
2531debfc3dSmrg and converts it to
2541debfc3dSmrg # z_2 = PHI <x, y>
2551debfc3dSmrg <Lx>:;
2561debfc3dSmrg Returns true if a transformation was done and edge insertions
2571debfc3dSmrg need to be committed. Global data PHIVN and N is used to track
2581debfc3dSmrg past transformation results. We need to be especially careful here
2591debfc3dSmrg with aliasing issues as we are moving memory reads. */
2601debfc3dSmrg
2611debfc3dSmrg static bool
propagate_with_phi(basic_block bb,gphi * phi,struct phiprop_d * phivn,size_t n)2621debfc3dSmrg propagate_with_phi (basic_block bb, gphi *phi, struct phiprop_d *phivn,
2631debfc3dSmrg size_t n)
2641debfc3dSmrg {
2651debfc3dSmrg tree ptr = PHI_RESULT (phi);
2661debfc3dSmrg gimple *use_stmt;
2671debfc3dSmrg tree res = NULL_TREE;
2681debfc3dSmrg gimple_stmt_iterator gsi;
2691debfc3dSmrg imm_use_iterator ui;
2701debfc3dSmrg use_operand_p arg_p, use;
2711debfc3dSmrg ssa_op_iter i;
2721debfc3dSmrg bool phi_inserted;
2731debfc3dSmrg bool changed;
2741debfc3dSmrg tree type = NULL_TREE;
2751debfc3dSmrg
2761debfc3dSmrg if (!POINTER_TYPE_P (TREE_TYPE (ptr))
2771debfc3dSmrg || (!is_gimple_reg_type (TREE_TYPE (TREE_TYPE (ptr)))
2781debfc3dSmrg && TYPE_MODE (TREE_TYPE (TREE_TYPE (ptr))) == BLKmode))
2791debfc3dSmrg return false;
2801debfc3dSmrg
2811debfc3dSmrg /* Check if we can "cheaply" dereference all phi arguments. */
2821debfc3dSmrg FOR_EACH_PHI_ARG (arg_p, phi, i, SSA_OP_USE)
2831debfc3dSmrg {
2841debfc3dSmrg tree arg = USE_FROM_PTR (arg_p);
2851debfc3dSmrg /* Walk the ssa chain until we reach a ssa name we already
2861debfc3dSmrg created a value for or we reach a definition of the form
2871debfc3dSmrg ssa_name_n = &var; */
2881debfc3dSmrg while (TREE_CODE (arg) == SSA_NAME
2891debfc3dSmrg && !SSA_NAME_IS_DEFAULT_DEF (arg)
2901debfc3dSmrg && (SSA_NAME_VERSION (arg) >= n
2911debfc3dSmrg || phivn[SSA_NAME_VERSION (arg)].value == NULL_TREE))
2921debfc3dSmrg {
2931debfc3dSmrg gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
2941debfc3dSmrg if (!gimple_assign_single_p (def_stmt))
2951debfc3dSmrg return false;
2961debfc3dSmrg arg = gimple_assign_rhs1 (def_stmt);
2971debfc3dSmrg }
2981debfc3dSmrg if (TREE_CODE (arg) != ADDR_EXPR
2991debfc3dSmrg && !(TREE_CODE (arg) == SSA_NAME
3001debfc3dSmrg && SSA_NAME_VERSION (arg) < n
3011debfc3dSmrg && phivn[SSA_NAME_VERSION (arg)].value != NULL_TREE
3021debfc3dSmrg && (!type
3031debfc3dSmrg || types_compatible_p
3041debfc3dSmrg (type, TREE_TYPE (phivn[SSA_NAME_VERSION (arg)].value)))
3051debfc3dSmrg && phivn_valid_p (phivn, arg, bb)))
3061debfc3dSmrg return false;
3071debfc3dSmrg if (!type
3081debfc3dSmrg && TREE_CODE (arg) == SSA_NAME)
3091debfc3dSmrg type = TREE_TYPE (phivn[SSA_NAME_VERSION (arg)].value);
3101debfc3dSmrg }
3111debfc3dSmrg
3121debfc3dSmrg /* Find a dereferencing use. First follow (single use) ssa
3131debfc3dSmrg copy chains for ptr. */
3141debfc3dSmrg while (single_imm_use (ptr, &use, &use_stmt)
3151debfc3dSmrg && gimple_assign_ssa_name_copy_p (use_stmt))
3161debfc3dSmrg ptr = gimple_assign_lhs (use_stmt);
3171debfc3dSmrg
3181debfc3dSmrg /* Replace the first dereference of *ptr if there is one and if we
3191debfc3dSmrg can move the loads to the place of the ptr phi node. */
3201debfc3dSmrg phi_inserted = false;
3211debfc3dSmrg changed = false;
3221debfc3dSmrg FOR_EACH_IMM_USE_STMT (use_stmt, ui, ptr)
3231debfc3dSmrg {
3241debfc3dSmrg gimple *def_stmt;
3251debfc3dSmrg tree vuse;
3261debfc3dSmrg
3271debfc3dSmrg /* Only replace loads in blocks that post-dominate the PHI node. That
3281debfc3dSmrg makes sure we don't end up speculating loads. */
3291debfc3dSmrg if (!dominated_by_p (CDI_POST_DOMINATORS,
3301debfc3dSmrg bb, gimple_bb (use_stmt)))
3311debfc3dSmrg continue;
3321debfc3dSmrg
3331debfc3dSmrg /* Check whether this is a load of *ptr. */
3341debfc3dSmrg if (!(is_gimple_assign (use_stmt)
3351debfc3dSmrg && gimple_assign_rhs_code (use_stmt) == MEM_REF
3361debfc3dSmrg && TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) == ptr
3371debfc3dSmrg && integer_zerop (TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 1))
3381debfc3dSmrg && (!type
3391debfc3dSmrg || types_compatible_p
3401debfc3dSmrg (TREE_TYPE (gimple_assign_lhs (use_stmt)), type))
341a05ac97eSmrg /* We cannot replace a load that may throw or is volatile.
342a05ac97eSmrg For volatiles the transform can change the number of
343a05ac97eSmrg executions if the load is inside a loop but the address
344a05ac97eSmrg computations outside (PR91812). We could relax this
345a05ac97eSmrg if we guard against that appropriately. For loads that can
346a05ac97eSmrg throw we could relax things if the moved loads all are
347a05ac97eSmrg known to not throw. */
348c0a68be4Smrg && !stmt_can_throw_internal (cfun, use_stmt)
349a05ac97eSmrg && !gimple_has_volatile_ops (use_stmt)))
3501debfc3dSmrg continue;
3511debfc3dSmrg
3521debfc3dSmrg /* Check if we can move the loads. The def stmt of the virtual use
3531debfc3dSmrg needs to be in a different basic block dominating bb. When the
3541debfc3dSmrg def is an edge-inserted one we know it dominates us. */
3551debfc3dSmrg vuse = gimple_vuse (use_stmt);
3561debfc3dSmrg def_stmt = SSA_NAME_DEF_STMT (vuse);
3571debfc3dSmrg if (!SSA_NAME_IS_DEFAULT_DEF (vuse)
3581debfc3dSmrg && (gimple_bb (def_stmt) == bb
3591debfc3dSmrg || (gimple_bb (def_stmt)
3601debfc3dSmrg && !dominated_by_p (CDI_DOMINATORS,
3611debfc3dSmrg bb, gimple_bb (def_stmt)))))
3621debfc3dSmrg goto next;
3631debfc3dSmrg
3641debfc3dSmrg /* Found a proper dereference with an aggregate copy. Just
3651debfc3dSmrg insert aggregate copies on the edges instead. */
3661debfc3dSmrg if (!is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (use_stmt))))
3671debfc3dSmrg {
3681debfc3dSmrg if (!gimple_vdef (use_stmt))
3691debfc3dSmrg goto next;
3701debfc3dSmrg
3711debfc3dSmrg /* As we replicate the lhs on each incoming edge all
3721debfc3dSmrg used SSA names have to be available there. */
3731debfc3dSmrg if (! for_each_index (gimple_assign_lhs_ptr (use_stmt),
3741debfc3dSmrg chk_uses,
3751debfc3dSmrg get_immediate_dominator (CDI_DOMINATORS,
3761debfc3dSmrg gimple_bb (phi))))
3771debfc3dSmrg goto next;
3781debfc3dSmrg
3791debfc3dSmrg gimple *vuse_stmt;
3801debfc3dSmrg imm_use_iterator vui;
3811debfc3dSmrg use_operand_p vuse_p;
3821debfc3dSmrg /* In order to move the aggregate copies earlier, make sure
3831debfc3dSmrg there are no statements that could read from memory
3841debfc3dSmrg aliasing the lhs in between the start of bb and use_stmt.
3851debfc3dSmrg As we require use_stmt to have a VDEF above, loads after
3861debfc3dSmrg use_stmt will use a different virtual SSA_NAME. */
3871debfc3dSmrg FOR_EACH_IMM_USE_FAST (vuse_p, vui, vuse)
3881debfc3dSmrg {
3891debfc3dSmrg vuse_stmt = USE_STMT (vuse_p);
3901debfc3dSmrg if (vuse_stmt == use_stmt)
3911debfc3dSmrg continue;
3921debfc3dSmrg if (!dominated_by_p (CDI_DOMINATORS,
3931debfc3dSmrg gimple_bb (vuse_stmt), bb))
3941debfc3dSmrg continue;
3951debfc3dSmrg if (ref_maybe_used_by_stmt_p (vuse_stmt,
3961debfc3dSmrg gimple_assign_lhs (use_stmt)))
3971debfc3dSmrg goto next;
3981debfc3dSmrg }
3991debfc3dSmrg
4001debfc3dSmrg phiprop_insert_phi (bb, phi, use_stmt, phivn, n);
4011debfc3dSmrg
4021debfc3dSmrg /* Remove old stmt. The phi is taken care of by DCE. */
4031debfc3dSmrg gsi = gsi_for_stmt (use_stmt);
4041debfc3dSmrg /* Unlinking the VDEF here is fine as we are sure that we process
4051debfc3dSmrg stmts in execution order due to aggregate copies having VDEFs
4061debfc3dSmrg and we emit loads on the edges in the very same order.
4071debfc3dSmrg We get multiple copies (or intermediate register loads) handled
4081debfc3dSmrg only by walking PHIs or immediate uses in a lucky order though,
4091debfc3dSmrg so we could signal the caller to re-start iterating over PHIs
4101debfc3dSmrg when we come here which would make it quadratic in the number
4111debfc3dSmrg of PHIs. */
4121debfc3dSmrg unlink_stmt_vdef (use_stmt);
4131debfc3dSmrg gsi_remove (&gsi, true);
4141debfc3dSmrg
4151debfc3dSmrg changed = true;
4161debfc3dSmrg }
4171debfc3dSmrg
4181debfc3dSmrg /* Found a proper dereference. Insert a phi node if this
4191debfc3dSmrg is the first load transformation. */
4201debfc3dSmrg else if (!phi_inserted)
4211debfc3dSmrg {
4221debfc3dSmrg res = phiprop_insert_phi (bb, phi, use_stmt, phivn, n);
4231debfc3dSmrg type = TREE_TYPE (res);
4241debfc3dSmrg
4251debfc3dSmrg /* Remember the value we created for *ptr. */
4261debfc3dSmrg phivn[SSA_NAME_VERSION (ptr)].value = res;
4271debfc3dSmrg phivn[SSA_NAME_VERSION (ptr)].vuse = vuse;
4281debfc3dSmrg
4291debfc3dSmrg /* Remove old stmt. The phi is taken care of by DCE, if we
4301debfc3dSmrg want to delete it here we also have to delete all intermediate
4311debfc3dSmrg copies. */
4321debfc3dSmrg gsi = gsi_for_stmt (use_stmt);
4331debfc3dSmrg gsi_remove (&gsi, true);
4341debfc3dSmrg
4351debfc3dSmrg phi_inserted = true;
4361debfc3dSmrg changed = true;
4371debfc3dSmrg }
4381debfc3dSmrg else
4391debfc3dSmrg {
4401debfc3dSmrg /* Further replacements are easy, just make a copy out of the
4411debfc3dSmrg load. */
4421debfc3dSmrg gimple_assign_set_rhs1 (use_stmt, res);
4431debfc3dSmrg update_stmt (use_stmt);
4441debfc3dSmrg changed = true;
4451debfc3dSmrg }
4461debfc3dSmrg
4471debfc3dSmrg next:;
4481debfc3dSmrg /* Continue searching for a proper dereference. */
4491debfc3dSmrg }
4501debfc3dSmrg
4511debfc3dSmrg return changed;
4521debfc3dSmrg }
4531debfc3dSmrg
4541debfc3dSmrg /* Main entry for phiprop pass. */
4551debfc3dSmrg
4561debfc3dSmrg namespace {
4571debfc3dSmrg
4581debfc3dSmrg const pass_data pass_data_phiprop =
4591debfc3dSmrg {
4601debfc3dSmrg GIMPLE_PASS, /* type */
4611debfc3dSmrg "phiprop", /* name */
4621debfc3dSmrg OPTGROUP_NONE, /* optinfo_flags */
4631debfc3dSmrg TV_TREE_PHIPROP, /* tv_id */
4641debfc3dSmrg ( PROP_cfg | PROP_ssa ), /* properties_required */
4651debfc3dSmrg 0, /* properties_provided */
4661debfc3dSmrg 0, /* properties_destroyed */
4671debfc3dSmrg 0, /* todo_flags_start */
4681debfc3dSmrg TODO_update_ssa, /* todo_flags_finish */
4691debfc3dSmrg };
4701debfc3dSmrg
4711debfc3dSmrg class pass_phiprop : public gimple_opt_pass
4721debfc3dSmrg {
4731debfc3dSmrg public:
pass_phiprop(gcc::context * ctxt)4741debfc3dSmrg pass_phiprop (gcc::context *ctxt)
4751debfc3dSmrg : gimple_opt_pass (pass_data_phiprop, ctxt)
4761debfc3dSmrg {}
4771debfc3dSmrg
4781debfc3dSmrg /* opt_pass methods: */
gate(function *)4791debfc3dSmrg virtual bool gate (function *) { return flag_tree_phiprop; }
4801debfc3dSmrg virtual unsigned int execute (function *);
4811debfc3dSmrg
4821debfc3dSmrg }; // class pass_phiprop
4831debfc3dSmrg
4841debfc3dSmrg unsigned int
execute(function * fun)4851debfc3dSmrg pass_phiprop::execute (function *fun)
4861debfc3dSmrg {
4871debfc3dSmrg vec<basic_block> bbs;
4881debfc3dSmrg struct phiprop_d *phivn;
4891debfc3dSmrg bool did_something = false;
4901debfc3dSmrg basic_block bb;
4911debfc3dSmrg gphi_iterator gsi;
4921debfc3dSmrg unsigned i;
4931debfc3dSmrg size_t n;
4941debfc3dSmrg
4951debfc3dSmrg calculate_dominance_info (CDI_DOMINATORS);
4961debfc3dSmrg calculate_dominance_info (CDI_POST_DOMINATORS);
4971debfc3dSmrg
4981debfc3dSmrg n = num_ssa_names;
4991debfc3dSmrg phivn = XCNEWVEC (struct phiprop_d, n);
5001debfc3dSmrg
5011debfc3dSmrg /* Walk the dominator tree in preorder. */
5021debfc3dSmrg bbs = get_all_dominated_blocks (CDI_DOMINATORS,
5031debfc3dSmrg single_succ (ENTRY_BLOCK_PTR_FOR_FN (fun)));
5041debfc3dSmrg FOR_EACH_VEC_ELT (bbs, i, bb)
505a05ac97eSmrg {
506a05ac97eSmrg /* Since we're going to move dereferences across predecessor
507a05ac97eSmrg edges avoid blocks with abnormal predecessors. */
508a05ac97eSmrg if (bb_has_abnormal_pred (bb))
509a05ac97eSmrg continue;
5101debfc3dSmrg for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5111debfc3dSmrg did_something |= propagate_with_phi (bb, gsi.phi (), phivn, n);
512a05ac97eSmrg }
5131debfc3dSmrg
5141debfc3dSmrg if (did_something)
5151debfc3dSmrg gsi_commit_edge_inserts ();
5161debfc3dSmrg
5171debfc3dSmrg bbs.release ();
5181debfc3dSmrg free (phivn);
5191debfc3dSmrg
5201debfc3dSmrg free_dominance_info (CDI_POST_DOMINATORS);
5211debfc3dSmrg
5221debfc3dSmrg return 0;
5231debfc3dSmrg }
5241debfc3dSmrg
5251debfc3dSmrg } // anon namespace
5261debfc3dSmrg
5271debfc3dSmrg gimple_opt_pass *
make_pass_phiprop(gcc::context * ctxt)5281debfc3dSmrg make_pass_phiprop (gcc::context *ctxt)
5291debfc3dSmrg {
5301debfc3dSmrg return new pass_phiprop (ctxt);
5311debfc3dSmrg }
532