xref: /dflybsd-src/contrib/gcc-8.0/gcc/tree-ssa-phiprop.c (revision 38fd149817dfbff97799f62fcb70be98c4e32523)
1*38fd1498Szrj /* Backward propagation of indirect loads through PHIs.
2*38fd1498Szrj    Copyright (C) 2007-2018 Free Software Foundation, Inc.
3*38fd1498Szrj    Contributed by Richard Guenther <rguenther@suse.de>
4*38fd1498Szrj 
5*38fd1498Szrj This file is part of GCC.
6*38fd1498Szrj 
7*38fd1498Szrj GCC is free software; you can redistribute it and/or modify
8*38fd1498Szrj it under the terms of the GNU General Public License as published by
9*38fd1498Szrj the Free Software Foundation; either version 3, or (at your option)
10*38fd1498Szrj any later version.
11*38fd1498Szrj 
12*38fd1498Szrj GCC is distributed in the hope that it will be useful,
13*38fd1498Szrj but WITHOUT ANY WARRANTY; without even the implied warranty of
14*38fd1498Szrj MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15*38fd1498Szrj GNU General Public License for more details.
16*38fd1498Szrj 
17*38fd1498Szrj You should have received a copy of the GNU General Public License
18*38fd1498Szrj along with GCC; see the file COPYING3.  If not see
19*38fd1498Szrj <http://www.gnu.org/licenses/>.  */
20*38fd1498Szrj 
21*38fd1498Szrj #include "config.h"
22*38fd1498Szrj #include "system.h"
23*38fd1498Szrj #include "coretypes.h"
24*38fd1498Szrj #include "backend.h"
25*38fd1498Szrj #include "tree.h"
26*38fd1498Szrj #include "gimple.h"
27*38fd1498Szrj #include "tree-pass.h"
28*38fd1498Szrj #include "ssa.h"
29*38fd1498Szrj #include "gimple-pretty-print.h"
30*38fd1498Szrj #include "fold-const.h"
31*38fd1498Szrj #include "tree-eh.h"
32*38fd1498Szrj #include "gimplify.h"
33*38fd1498Szrj #include "gimple-iterator.h"
34*38fd1498Szrj #include "stor-layout.h"
35*38fd1498Szrj #include "tree-ssa-loop.h"
36*38fd1498Szrj 
37*38fd1498Szrj /* This pass propagates indirect loads through the PHI node for its
38*38fd1498Szrj    address to make the load source possibly non-addressable and to
39*38fd1498Szrj    allow for PHI optimization to trigger.
40*38fd1498Szrj 
41*38fd1498Szrj    For example the pass changes
42*38fd1498Szrj 
43*38fd1498Szrj      # addr_1 = PHI <&a, &b>
44*38fd1498Szrj      tmp_1 = *addr_1;
45*38fd1498Szrj 
46*38fd1498Szrj    to
47*38fd1498Szrj 
48*38fd1498Szrj      # tmp_1 = PHI <a, b>
49*38fd1498Szrj 
50*38fd1498Szrj    but also handles more complex scenarios like
51*38fd1498Szrj 
52*38fd1498Szrj      D.2077_2 = &this_1(D)->a1;
53*38fd1498Szrj      ...
54*38fd1498Szrj 
55*38fd1498Szrj      # b_12 = PHI <&c(2), D.2077_2(3)>
56*38fd1498Szrj      D.2114_13 = *b_12;
57*38fd1498Szrj      ...
58*38fd1498Szrj 
59*38fd1498Szrj      # b_15 = PHI <b_12(4), &b(5)>
60*38fd1498Szrj      D.2080_5 = &this_1(D)->a0;
61*38fd1498Szrj      ...
62*38fd1498Szrj 
63*38fd1498Szrj      # b_18 = PHI <D.2080_5(6), &c(7)>
64*38fd1498Szrj      ...
65*38fd1498Szrj 
66*38fd1498Szrj      # b_21 = PHI <b_15(8), b_18(9)>
67*38fd1498Szrj      D.2076_8 = *b_21;
68*38fd1498Szrj 
   where the addresses loaded are defined by PHIs themselves.
70*38fd1498Szrj    The above happens for
71*38fd1498Szrj 
72*38fd1498Szrj      std::max(std::min(a0, c), std::min(std::max(a1, c), b))
73*38fd1498Szrj 
74*38fd1498Szrj    where this pass transforms it to a form later PHI optimization
75*38fd1498Szrj    recognizes and transforms it to the simple
76*38fd1498Szrj 
77*38fd1498Szrj      D.2109_10 = this_1(D)->a1;
78*38fd1498Szrj      D.2110_11 = c;
79*38fd1498Szrj      D.2114_31 = MAX_EXPR <D.2109_10, D.2110_11>;
80*38fd1498Szrj      D.2115_14 = b;
81*38fd1498Szrj      D.2125_17 = MIN_EXPR <D.2115_14, D.2114_31>;
82*38fd1498Szrj      D.2119_16 = this_1(D)->a0;
83*38fd1498Szrj      D.2124_32 = MIN_EXPR <D.2110_11, D.2119_16>;
84*38fd1498Szrj      D.2076_33 = MAX_EXPR <D.2125_17, D.2124_32>;
85*38fd1498Szrj 
86*38fd1498Szrj    The pass does a dominator walk processing loads using a basic-block
87*38fd1498Szrj    local analysis and stores the result for use by transformations on
88*38fd1498Szrj    dominated basic-blocks.  */
89*38fd1498Szrj 
90*38fd1498Szrj 
/* Structure to keep track of the value of a dereferenced PHI result
   and the virtual operand used for that dereference.  Indexed by
   SSA name version of the PHI result that was dereferenced.  */

struct phiprop_d
{
  tree value;	/* The lhs the load of the PHI result was replaced with;
		   NULL_TREE if no value has been recorded yet.  */
  tree vuse;	/* Virtual operand of the original load; used to verify
		   the recorded value is still valid (see phivn_valid_p).  */
};
99*38fd1498Szrj 
100*38fd1498Szrj /* Verify if the value recorded for NAME in PHIVN is still valid at
101*38fd1498Szrj    the start of basic block BB.  */
102*38fd1498Szrj 
103*38fd1498Szrj static bool
104*38fd1498Szrj phivn_valid_p (struct phiprop_d *phivn, tree name, basic_block bb)
105*38fd1498Szrj {
106*38fd1498Szrj   tree vuse = phivn[SSA_NAME_VERSION (name)].vuse;
107*38fd1498Szrj   gimple *use_stmt;
108*38fd1498Szrj   imm_use_iterator ui2;
109*38fd1498Szrj   bool ok = true;
110*38fd1498Szrj 
111*38fd1498Szrj   /* The def stmts of the virtual uses need to be dominated by bb.  */
112*38fd1498Szrj   gcc_assert (vuse != NULL_TREE);
113*38fd1498Szrj 
114*38fd1498Szrj   FOR_EACH_IMM_USE_STMT (use_stmt, ui2, vuse)
115*38fd1498Szrj     {
116*38fd1498Szrj       /* If BB does not dominate a VDEF, the value is invalid.  */
117*38fd1498Szrj       if ((gimple_vdef (use_stmt) != NULL_TREE
118*38fd1498Szrj 	   || gimple_code (use_stmt) == GIMPLE_PHI)
119*38fd1498Szrj 	  && !dominated_by_p (CDI_DOMINATORS, gimple_bb (use_stmt), bb))
120*38fd1498Szrj 	{
121*38fd1498Szrj 	  ok = false;
122*38fd1498Szrj 	  BREAK_FROM_IMM_USE_STMT (ui2);
123*38fd1498Szrj 	}
124*38fd1498Szrj     }
125*38fd1498Szrj 
126*38fd1498Szrj   return ok;
127*38fd1498Szrj }
128*38fd1498Szrj 
/* Insert a new phi node for the dereference of PHI at basic_block
   BB with the virtual operands from USE_STMT.  PHIVN maps SSA name
   versions (below N) to previously created dereference values.
   Returns the lhs of USE_STMT, which is also the result of the new
   PHI node when the lhs is an SSA name.  */

static tree
phiprop_insert_phi (basic_block bb, gphi *phi, gimple *use_stmt,
		    struct phiprop_d *phivn, size_t n)
{
  tree res;
  gphi *new_phi = NULL;
  edge_iterator ei;
  edge e;

  gcc_assert (is_gimple_assign (use_stmt)
	      && gimple_assign_rhs_code (use_stmt) == MEM_REF);

  /* Build a new PHI node to replace the definition of
     the indirect reference lhs.  For an aggregate copy (non-SSA lhs)
     no PHI is built; copies are emitted on the edges instead.  */
  res = gimple_assign_lhs (use_stmt);
  if (TREE_CODE (res) == SSA_NAME)
    new_phi = create_phi_node (res, bb);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Inserting PHI for result of load ");
      print_gimple_stmt (dump_file, use_stmt, 0);
    }

  /* Add PHI arguments for each edge inserting loads of the
     addressable operands.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      tree old_arg, new_var;
      gassign *tmp;
      source_location locus;

      old_arg = PHI_ARG_DEF_FROM_EDGE (phi, e);
      locus = gimple_phi_arg_location_from_edge (phi, e);
      /* Walk simple copy chains until we reach either an SSA name we
	 already created a value for, or an ADDR_EXPR.  Names with
	 version >= N were created after PHIVN was sized and thus
	 cannot have a recorded value.  */
      while (TREE_CODE (old_arg) == SSA_NAME
	     && (SSA_NAME_VERSION (old_arg) >= n
	         || phivn[SSA_NAME_VERSION (old_arg)].value == NULL_TREE))
	{
	  gimple *def_stmt = SSA_NAME_DEF_STMT (old_arg);
	  old_arg = gimple_assign_rhs1 (def_stmt);
	  locus = gimple_location (def_stmt);
	}

      if (TREE_CODE (old_arg) == SSA_NAME)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "  for edge defining ");
	      print_generic_expr (dump_file, PHI_ARG_DEF_FROM_EDGE (phi, e));
	      fprintf (dump_file, " reusing PHI result ");
	      print_generic_expr (dump_file,
				  phivn[SSA_NAME_VERSION (old_arg)].value);
	      fprintf (dump_file, "\n");
	    }
	  /* Reuse a formerly created dereference.  */
	  new_var = phivn[SSA_NAME_VERSION (old_arg)].value;
	}
      else
	{
	  /* Materialize a load of the pointed-to object on this edge.  */
	  tree rhs = gimple_assign_rhs1 (use_stmt);
	  gcc_assert (TREE_CODE (old_arg) == ADDR_EXPR);
	  if (TREE_CODE (res) == SSA_NAME)
	    new_var = make_ssa_name (TREE_TYPE (rhs));
	  else
	    /* Aggregate case: replicate the lhs on each edge.  */
	    new_var = unshare_expr (res);
	  /* Non-invariant addresses cannot be unshared; fall back to
	     the original PHI argument for this edge.  */
	  if (!is_gimple_min_invariant (old_arg))
	    old_arg = PHI_ARG_DEF_FROM_EDGE (phi, e);
	  else
	    old_arg = unshare_expr (old_arg);
	  tmp = gimple_build_assign (new_var,
				     fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  old_arg,
						  TREE_OPERAND (rhs, 1)));
	  gimple_set_location (tmp, locus);

	  /* Queue the load on the edge; it is committed later by
	     gsi_commit_edge_inserts in the pass driver.  */
	  gsi_insert_on_edge (e, tmp);
	  update_stmt (tmp);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "  for edge defining ");
	      print_generic_expr (dump_file, PHI_ARG_DEF_FROM_EDGE (phi, e));
	      fprintf (dump_file, " inserting load ");
	      print_gimple_stmt (dump_file, tmp, 0);
	    }
	}

      if (new_phi)
	add_phi_arg (new_phi, new_var, e, locus);
    }

  if (new_phi)
    {
      update_stmt (new_phi);

      if (dump_file && (dump_flags & TDF_DETAILS))
	print_gimple_stmt (dump_file, new_phi, 0);
    }

  return res;
}
233*38fd1498Szrj 
234*38fd1498Szrj /* Verify if *idx is available at *DATA.  */
235*38fd1498Szrj 
236*38fd1498Szrj static bool
237*38fd1498Szrj chk_uses (tree, tree *idx, void *data)
238*38fd1498Szrj {
239*38fd1498Szrj   basic_block dom = (basic_block) data;
240*38fd1498Szrj   if (TREE_CODE (*idx) == SSA_NAME)
241*38fd1498Szrj     return (SSA_NAME_IS_DEFAULT_DEF (*idx)
242*38fd1498Szrj 	    || ! dominated_by_p (CDI_DOMINATORS,
243*38fd1498Szrj 				 gimple_bb (SSA_NAME_DEF_STMT (*idx)), dom));
244*38fd1498Szrj   return true;
245*38fd1498Szrj }
246*38fd1498Szrj 
/* Propagate between the phi node arguments of PHI in BB and phi result
   users.  For now this matches
        # p_2 = PHI <&x, &y>
      <Lx>:;
	p_3 = p_2;
	z_2 = *p_3;
   and converts it to
	# z_2 = PHI <x, y>
      <Lx>:;
   Returns true if a transformation was done and edge insertions
   need to be committed.  Global data PHIVN and N is used to track
   past transformation results.  We need to be especially careful here
   with aliasing issues as we are moving memory reads.  */

static bool
propagate_with_phi (basic_block bb, gphi *phi, struct phiprop_d *phivn,
		    size_t n)
{
  tree ptr = PHI_RESULT (phi);
  gimple *use_stmt;
  tree res = NULL_TREE;
  gimple_stmt_iterator gsi;
  imm_use_iterator ui;
  use_operand_p arg_p, use;
  ssa_op_iter i;
  bool phi_inserted;
  bool changed;
  tree type = NULL_TREE;

  /* Only pointer-typed PHIs are candidates; the pointed-to type must
     either fit a register or at least be a non-BLKmode aggregate we
     can copy.  */
  if (!POINTER_TYPE_P (TREE_TYPE (ptr))
      || (!is_gimple_reg_type (TREE_TYPE (TREE_TYPE (ptr)))
	  && TYPE_MODE (TREE_TYPE (TREE_TYPE (ptr))) == BLKmode))
    return false;

  /* Check if we can "cheaply" dereference all phi arguments.  */
  FOR_EACH_PHI_ARG (arg_p, phi, i, SSA_OP_USE)
    {
      tree arg = USE_FROM_PTR (arg_p);
      /* Walk the ssa chain until we reach a ssa name we already
	 created a value for or we reach a definition of the form
	 ssa_name_n = &var;  */
      while (TREE_CODE (arg) == SSA_NAME
	     && !SSA_NAME_IS_DEFAULT_DEF (arg)
	     && (SSA_NAME_VERSION (arg) >= n
	         || phivn[SSA_NAME_VERSION (arg)].value == NULL_TREE))
	{
	  gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
	  if (!gimple_assign_single_p (def_stmt))
	    return false;
	  arg = gimple_assign_rhs1 (def_stmt);
	}
      /* The argument is dereferenceable if it is either an address
	 or an SSA name with a recorded (and still valid) value of a
	 type compatible with what we have already collected.  */
      if (TREE_CODE (arg) != ADDR_EXPR
	  && !(TREE_CODE (arg) == SSA_NAME
	       && SSA_NAME_VERSION (arg) < n
	       && phivn[SSA_NAME_VERSION (arg)].value != NULL_TREE
	       && (!type
		   || types_compatible_p
		       (type, TREE_TYPE (phivn[SSA_NAME_VERSION (arg)].value)))
	       && phivn_valid_p (phivn, arg, bb)))
	return false;
      if (!type
	  && TREE_CODE (arg) == SSA_NAME)
	type = TREE_TYPE (phivn[SSA_NAME_VERSION (arg)].value);
    }

  /* Find a dereferencing use.  First follow (single use) ssa
     copy chains for ptr.  */
  while (single_imm_use (ptr, &use, &use_stmt)
	 && gimple_assign_ssa_name_copy_p (use_stmt))
    ptr = gimple_assign_lhs (use_stmt);

  /* Replace the first dereference of *ptr if there is one and if we
     can move the loads to the place of the ptr phi node.  */
  phi_inserted = false;
  changed = false;
  FOR_EACH_IMM_USE_STMT (use_stmt, ui, ptr)
    {
      gimple *def_stmt;
      tree vuse;

      /* Only replace loads in blocks that post-dominate the PHI node.  That
         makes sure we don't end up speculating loads.  */
      if (!dominated_by_p (CDI_POST_DOMINATORS,
			   bb, gimple_bb (use_stmt)))
	continue;

      /* Check whether this is a load of *ptr.  */
      if (!(is_gimple_assign (use_stmt)
	    && gimple_assign_rhs_code (use_stmt) == MEM_REF
	    && TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) == ptr
	    && integer_zerop (TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 1))
	    && (!type
		|| types_compatible_p
		     (TREE_TYPE (gimple_assign_lhs (use_stmt)), type))
	    /* We cannot replace a load that may throw or is volatile.  */
	    && !stmt_can_throw_internal (use_stmt)))
	continue;

      /* Check if we can move the loads.  The def stmt of the virtual use
	 needs to be in a different basic block dominating bb.  When the
	 def is an edge-inserted one we know it dominates us.  */
      vuse = gimple_vuse (use_stmt);
      def_stmt = SSA_NAME_DEF_STMT (vuse);
      if (!SSA_NAME_IS_DEFAULT_DEF (vuse)
	  && (gimple_bb (def_stmt) == bb
	      || (gimple_bb (def_stmt)
		  && !dominated_by_p (CDI_DOMINATORS,
				      bb, gimple_bb (def_stmt)))))
	goto next;

      /* Found a proper dereference with an aggregate copy.  Just
         insert aggregate copies on the edges instead.  */
      if (!is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (use_stmt))))
	{
	  if (!gimple_vdef (use_stmt))
	    goto next;

	  /* As we replicate the lhs on each incoming edge all
	     used SSA names have to be available there.  */
	  if (! for_each_index (gimple_assign_lhs_ptr (use_stmt),
				chk_uses,
				get_immediate_dominator (CDI_DOMINATORS,
							 gimple_bb (phi))))
	    goto next;

	  gimple *vuse_stmt;
	  imm_use_iterator vui;
	  use_operand_p vuse_p;
	  /* In order to move the aggregate copies earlier, make sure
	     there are no statements that could read from memory
	     aliasing the lhs in between the start of bb and use_stmt.
	     As we require use_stmt to have a VDEF above, loads after
	     use_stmt will use a different virtual SSA_NAME.  */
	  FOR_EACH_IMM_USE_FAST (vuse_p, vui, vuse)
	    {
	      vuse_stmt = USE_STMT (vuse_p);
	      if (vuse_stmt == use_stmt)
		continue;
	      if (!dominated_by_p (CDI_DOMINATORS,
				   gimple_bb (vuse_stmt), bb))
		continue;
	      if (ref_maybe_used_by_stmt_p (vuse_stmt,
					    gimple_assign_lhs (use_stmt)))
		goto next;
	    }

	  phiprop_insert_phi (bb, phi, use_stmt, phivn, n);

	  /* Remove old stmt.  The phi is taken care of by DCE.  */
	  gsi = gsi_for_stmt (use_stmt);
	  /* Unlinking the VDEF here is fine as we are sure that we process
	     stmts in execution order due to aggregate copies having VDEFs
	     and we emit loads on the edges in the very same order.
	     We get multiple copies (or intermediate register loads) handled
	     only by walking PHIs or immediate uses in a lucky order though,
	     so we could signal the caller to re-start iterating over PHIs
	     when we come here which would make it quadratic in the number
	     of PHIs.  */
	  unlink_stmt_vdef (use_stmt);
	  gsi_remove (&gsi, true);

	  changed = true;
	}

      /* Found a proper dereference.  Insert a phi node if this
	 is the first load transformation.  */
      else if (!phi_inserted)
	{
	  res = phiprop_insert_phi (bb, phi, use_stmt, phivn, n);
	  type = TREE_TYPE (res);

	  /* Remember the value we created for *ptr.  */
	  phivn[SSA_NAME_VERSION (ptr)].value = res;
	  phivn[SSA_NAME_VERSION (ptr)].vuse = vuse;

	  /* Remove old stmt.  The phi is taken care of by DCE, if we
	     want to delete it here we also have to delete all intermediate
	     copies.  */
	  gsi = gsi_for_stmt (use_stmt);
	  gsi_remove (&gsi, true);

	  phi_inserted = true;
	  changed = true;
	}
      else
	{
	  /* Further replacements are easy, just make a copy out of the
	     load.  */
	  gimple_assign_set_rhs1 (use_stmt, res);
	  update_stmt (use_stmt);
	  changed = true;
	}

next:;
      /* Continue searching for a proper dereference.  */
    }

  return changed;
}
446*38fd1498Szrj 
447*38fd1498Szrj /* Main entry for phiprop pass.  */
448*38fd1498Szrj 
449*38fd1498Szrj namespace {
450*38fd1498Szrj 
/* Metadata describing the phiprop pass to the pass manager.  */

const pass_data pass_data_phiprop =
{
  GIMPLE_PASS, /* type */
  "phiprop", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_PHIPROP, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};
463*38fd1498Szrj 
/* Pass wrapper exposing phiprop to the GIMPLE pass pipeline.  */

class pass_phiprop : public gimple_opt_pass
{
public:
  pass_phiprop (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_phiprop, ctxt)
  {}

  /* opt_pass methods: */
  /* Run only when the phiprop transform is enabled.  */
  virtual bool gate (function *) { return flag_tree_phiprop; }
  virtual unsigned int execute (function *);

}; // class pass_phiprop
476*38fd1498Szrj 
477*38fd1498Szrj unsigned int
478*38fd1498Szrj pass_phiprop::execute (function *fun)
479*38fd1498Szrj {
480*38fd1498Szrj   vec<basic_block> bbs;
481*38fd1498Szrj   struct phiprop_d *phivn;
482*38fd1498Szrj   bool did_something = false;
483*38fd1498Szrj   basic_block bb;
484*38fd1498Szrj   gphi_iterator gsi;
485*38fd1498Szrj   unsigned i;
486*38fd1498Szrj   size_t n;
487*38fd1498Szrj 
488*38fd1498Szrj   calculate_dominance_info (CDI_DOMINATORS);
489*38fd1498Szrj   calculate_dominance_info (CDI_POST_DOMINATORS);
490*38fd1498Szrj 
491*38fd1498Szrj   n = num_ssa_names;
492*38fd1498Szrj   phivn = XCNEWVEC (struct phiprop_d, n);
493*38fd1498Szrj 
494*38fd1498Szrj   /* Walk the dominator tree in preorder.  */
495*38fd1498Szrj   bbs = get_all_dominated_blocks (CDI_DOMINATORS,
496*38fd1498Szrj 				  single_succ (ENTRY_BLOCK_PTR_FOR_FN (fun)));
497*38fd1498Szrj   FOR_EACH_VEC_ELT (bbs, i, bb)
498*38fd1498Szrj     for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
499*38fd1498Szrj       did_something |= propagate_with_phi (bb, gsi.phi (), phivn, n);
500*38fd1498Szrj 
501*38fd1498Szrj   if (did_something)
502*38fd1498Szrj     gsi_commit_edge_inserts ();
503*38fd1498Szrj 
504*38fd1498Szrj   bbs.release ();
505*38fd1498Szrj   free (phivn);
506*38fd1498Szrj 
507*38fd1498Szrj   free_dominance_info (CDI_POST_DOMINATORS);
508*38fd1498Szrj 
509*38fd1498Szrj   return 0;
510*38fd1498Szrj }
511*38fd1498Szrj 
512*38fd1498Szrj } // anon namespace
513*38fd1498Szrj 
/* Factory creating a phiprop pass instance for context CTXT;
   called by the pass manager (see passes.def).  */

gimple_opt_pass *
make_pass_phiprop (gcc::context *ctxt)
{
  return new pass_phiprop (ctxt);
}
519