xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/tree-ssa-phiprop.c (revision f3cfa6f6ce31685c6c4a758bc430e69eb99f50a4)
1 /* Backward propagation of indirect loads through PHIs.
2    Copyright (C) 2007-2016 Free Software Foundation, Inc.
3    Contributed by Richard Guenther <rguenther@suse.de>
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11 
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15 GNU General Public License for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "tree-pass.h"
28 #include "ssa.h"
29 #include "gimple-pretty-print.h"
30 #include "fold-const.h"
31 #include "tree-eh.h"
32 #include "gimplify.h"
33 #include "gimple-iterator.h"
34 
35 /* This pass propagates indirect loads through the PHI node for its
36    address to make the load source possibly non-addressable and to
37    allow for PHI optimization to trigger.
38 
39    For example the pass changes
40 
41      # addr_1 = PHI <&a, &b>
42      tmp_1 = *addr_1;
43 
44    to
45 
46      # tmp_1 = PHI <a, b>
47 
48    but also handles more complex scenarios like
49 
50      D.2077_2 = &this_1(D)->a1;
51      ...
52 
53      # b_12 = PHI <&c(2), D.2077_2(3)>
54      D.2114_13 = *b_12;
55      ...
56 
57      # b_15 = PHI <b_12(4), &b(5)>
58      D.2080_5 = &this_1(D)->a0;
59      ...
60 
61      # b_18 = PHI <D.2080_5(6), &c(7)>
62      ...
63 
64      # b_21 = PHI <b_15(8), b_18(9)>
65      D.2076_8 = *b_21;
66 
67    where the addresses loaded are defined by PHIs themselves.
68    The above happens for
69 
70      std::max(std::min(a0, c), std::min(std::max(a1, c), b))
71 
72    where this pass transforms it to a form later PHI optimization
73    recognizes and transforms it to the simple
74 
75      D.2109_10 = this_1(D)->a1;
76      D.2110_11 = c;
77      D.2114_31 = MAX_EXPR <D.2109_10, D.2110_11>;
78      D.2115_14 = b;
79      D.2125_17 = MIN_EXPR <D.2115_14, D.2114_31>;
80      D.2119_16 = this_1(D)->a0;
81      D.2124_32 = MIN_EXPR <D.2110_11, D.2119_16>;
82      D.2076_33 = MAX_EXPR <D.2125_17, D.2124_32>;
83 
84    The pass does a dominator walk processing loads using a basic-block
85    local analysis and stores the result for use by transformations on
86    dominated basic-blocks.  */
87 
88 
/* Structure to keep track of the value of a dereferenced PHI result
   and the virtual operand used for that dereference.  The pass keeps
   an array of these indexed by SSA name version.  */

struct phiprop_d
{
  /* SSA name holding the value created for the dereference of the
     PHI result, or NULL_TREE if none was recorded.  */
  tree value;
  /* Virtual operand (VUSE) of the load VALUE was created from; used
     by phivn_valid_p to check the value is still valid.  */
  tree vuse;
};
97 
98 /* Verify if the value recorded for NAME in PHIVN is still valid at
99    the start of basic block BB.  */
100 
101 static bool
102 phivn_valid_p (struct phiprop_d *phivn, tree name, basic_block bb)
103 {
104   tree vuse = phivn[SSA_NAME_VERSION (name)].vuse;
105   gimple *use_stmt;
106   imm_use_iterator ui2;
107   bool ok = true;
108 
109   /* The def stmts of the virtual uses need to be dominated by bb.  */
110   gcc_assert (vuse != NULL_TREE);
111 
112   FOR_EACH_IMM_USE_STMT (use_stmt, ui2, vuse)
113     {
114       /* If BB does not dominate a VDEF, the value is invalid.  */
115       if ((gimple_vdef (use_stmt) != NULL_TREE
116 	   || gimple_code (use_stmt) == GIMPLE_PHI)
117 	  && !dominated_by_p (CDI_DOMINATORS, gimple_bb (use_stmt), bb))
118 	{
119 	  ok = false;
120 	  BREAK_FROM_IMM_USE_STMT (ui2);
121 	}
122     }
123 
124   return ok;
125 }
126 
/* Insert a new phi node for the dereference of PHI at basic_block
   BB with the virtual operands from USE_STMT.  USE_STMT is the load
   "lhs = *p" being replaced; PHIVN (of size N) records dereference
   values created by earlier transformations so they can be reused.
   Returns the SSA name (the old load lhs) now defined by the new
   PHI node.  */

static tree
phiprop_insert_phi (basic_block bb, gphi *phi, gimple *use_stmt,
		    struct phiprop_d *phivn, size_t n)
{
  tree res;
  gphi *new_phi;
  edge_iterator ei;
  edge e;

  /* The caller guarantees USE_STMT is an assignment loading through
     a MEM_REF.  */
  gcc_assert (is_gimple_assign (use_stmt)
	      && gimple_assign_rhs_code (use_stmt) == MEM_REF);

  /* Build a new PHI node to replace the definition of
     the indirect reference lhs.  Reusing the old lhs as the PHI
     result means existing uses of the loaded value need no update.  */
  res = gimple_assign_lhs (use_stmt);
  new_phi = create_phi_node (res, bb);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Inserting PHI for result of load ");
      print_gimple_stmt (dump_file, use_stmt, 0, 0);
    }

  /* Add PHI arguments for each edge inserting loads of the
     addressable operands.  */
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      tree old_arg, new_var;
      gassign *tmp;
      source_location locus;

      old_arg = PHI_ARG_DEF_FROM_EDGE (phi, e);
      locus = gimple_phi_arg_location_from_edge (phi, e);
      /* Walk the definition chain until we reach either an SSA name
	 with a recorded dereference value (version < N with a non-NULL
	 PHIVN entry) or a non-SSA operand such as an ADDR_EXPR.
	 propagate_with_phi already verified every definition on this
	 chain is a single assignment, so gimple_assign_rhs1 is safe.  */
      while (TREE_CODE (old_arg) == SSA_NAME
	     && (SSA_NAME_VERSION (old_arg) >= n
	         || phivn[SSA_NAME_VERSION (old_arg)].value == NULL_TREE))
	{
	  gimple *def_stmt = SSA_NAME_DEF_STMT (old_arg);
	  old_arg = gimple_assign_rhs1 (def_stmt);
	  locus = gimple_location (def_stmt);
	}

      if (TREE_CODE (old_arg) == SSA_NAME)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "  for edge defining ");
	      print_generic_expr (dump_file, PHI_ARG_DEF_FROM_EDGE (phi, e), 0);
	      fprintf (dump_file, " reusing PHI result ");
	      print_generic_expr (dump_file,
				  phivn[SSA_NAME_VERSION (old_arg)].value, 0);
	      fprintf (dump_file, "\n");
	    }
	  /* Reuse a formerly created dereference.  */
	  new_var = phivn[SSA_NAME_VERSION (old_arg)].value;
	}
      else
	{
	  tree rhs = gimple_assign_rhs1 (use_stmt);
	  gcc_assert (TREE_CODE (old_arg) == ADDR_EXPR);
	  new_var = make_ssa_name (TREE_TYPE (rhs));
	  /* A non-invariant address must be used via the original SSA
	     PHI argument; an invariant ADDR_EXPR has to be unshared
	     before being placed into a new statement.  */
	  if (!is_gimple_min_invariant (old_arg))
	    old_arg = PHI_ARG_DEF_FROM_EDGE (phi, e);
	  else
	    old_arg = unshare_expr (old_arg);
	  /* Materialize the load *OLD_ARG, keeping the offset of the
	     original MEM_REF, on the incoming edge.  */
	  tmp = gimple_build_assign (new_var,
				     fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  old_arg,
						  TREE_OPERAND (rhs, 1)));
	  gimple_set_location (tmp, locus);

	  gsi_insert_on_edge (e, tmp);
	  update_stmt (tmp);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "  for edge defining ");
	      print_generic_expr (dump_file, PHI_ARG_DEF_FROM_EDGE (phi, e), 0);
	      fprintf (dump_file, " inserting load ");
	      print_gimple_stmt (dump_file, tmp, 0, 0);
	    }
	}

      add_phi_arg (new_phi, new_var, e, locus);
    }

  update_stmt (new_phi);

  if (dump_file && (dump_flags & TDF_DETAILS))
    print_gimple_stmt (dump_file, new_phi, 0, 0);

  return res;
}
223 
/* Propagate between the phi node arguments of PHI in BB and phi result
   users.  For now this matches
        # p_2 = PHI <&x, &y>
      <Lx>:;
	p_3 = p_2;
	z_2 = *p_3;
   and converts it to
	# z_2 = PHI <x, y>
      <Lx>:;
   Returns true if a transformation was done and edge insertions
   need to be committed.  Global data PHIVN and N is used to track
   past transformation results.  We need to be especially careful here
   with aliasing issues as we are moving memory reads.  */

static bool
propagate_with_phi (basic_block bb, gphi *phi, struct phiprop_d *phivn,
		    size_t n)
{
  tree ptr = PHI_RESULT (phi);
  gimple *use_stmt;
  tree res = NULL_TREE;
  gimple_stmt_iterator gsi;
  imm_use_iterator ui;
  use_operand_p arg_p, use;
  ssa_op_iter i;
  bool phi_inserted;
  tree type = NULL_TREE;

  /* Only a PHI of a pointer to a register-type value is interesting;
     dereferencing anything else cannot become a register PHI.  */
  if (!POINTER_TYPE_P (TREE_TYPE (ptr))
      || !is_gimple_reg_type (TREE_TYPE (TREE_TYPE (ptr))))
    return false;

  /* Check if we can "cheaply" dereference all phi arguments.  */
  FOR_EACH_PHI_ARG (arg_p, phi, i, SSA_OP_USE)
    {
      tree arg = USE_FROM_PTR (arg_p);
      /* Walk the ssa chain until we reach a ssa name we already
	 created a value for or we reach a definition of the form
	 ssa_name_n = &var;  */
      while (TREE_CODE (arg) == SSA_NAME
	     && !SSA_NAME_IS_DEFAULT_DEF (arg)
	     && (SSA_NAME_VERSION (arg) >= n
	         || phivn[SSA_NAME_VERSION (arg)].value == NULL_TREE))
	{
	  gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
	  /* Give up on anything but single-rhs assignments; this also
	     guarantees phiprop_insert_phi can re-walk the chain.  */
	  if (!gimple_assign_single_p (def_stmt))
	    return false;
	  arg = gimple_assign_rhs1 (def_stmt);
	}
      /* The argument is usable if the chain ended in an ADDR_EXPR, or
	 in an SSA name with a recorded dereference value of compatible
	 type that is still valid at the start of BB.  */
      if (TREE_CODE (arg) != ADDR_EXPR
	  && !(TREE_CODE (arg) == SSA_NAME
	       && SSA_NAME_VERSION (arg) < n
	       && phivn[SSA_NAME_VERSION (arg)].value != NULL_TREE
	       && (!type
		   || types_compatible_p
		       (type, TREE_TYPE (phivn[SSA_NAME_VERSION (arg)].value)))
	       && phivn_valid_p (phivn, arg, bb)))
	return false;
      /* Remember the type of the first reused value; later reused
	 values and replaced loads must agree with it.  */
      if (!type
	  && TREE_CODE (arg) == SSA_NAME)
	type = TREE_TYPE (phivn[SSA_NAME_VERSION (arg)].value);
    }

  /* Find a dereferencing use.  First follow (single use) ssa
     copy chains for ptr.  */
  while (single_imm_use (ptr, &use, &use_stmt)
	 && gimple_assign_ssa_name_copy_p (use_stmt))
    ptr = gimple_assign_lhs (use_stmt);

  /* Replace the first dereference of *ptr if there is one and if we
     can move the loads to the place of the ptr phi node.  */
  phi_inserted = false;
  FOR_EACH_IMM_USE_STMT (use_stmt, ui, ptr)
    {
      gimple *def_stmt;
      tree vuse;

      /* Only replace loads in blocks that post-dominate the PHI node.  That
         makes sure we don't end up speculating loads.  */
      if (!dominated_by_p (CDI_POST_DOMINATORS,
			   bb, gimple_bb (use_stmt)))
	continue;

      /* Check whether this is a load of *ptr.  The MEM_REF must use
	 PTR directly with a zero offset, produce a value of the type
	 agreed on above, and be safe to move.  */
      if (!(is_gimple_assign (use_stmt)
	    && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
	    && gimple_assign_rhs_code (use_stmt) == MEM_REF
	    && TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) == ptr
	    && integer_zerop (TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 1))
	    && (!type
		|| types_compatible_p
		     (TREE_TYPE (gimple_assign_lhs (use_stmt)), type))
	    /* We cannot replace a load that may throw or is volatile.  */
	    && !stmt_can_throw_internal (use_stmt)))
	continue;

      /* Check if we can move the loads.  The def stmt of the virtual use
	 needs to be in a different basic block dominating bb.  Otherwise
	 a store between the PHI and the load could change the value.  */
      vuse = gimple_vuse (use_stmt);
      def_stmt = SSA_NAME_DEF_STMT (vuse);
      if (!SSA_NAME_IS_DEFAULT_DEF (vuse)
	  && (gimple_bb (def_stmt) == bb
	      || !dominated_by_p (CDI_DOMINATORS,
				  bb, gimple_bb (def_stmt))))
	goto next;

      /* Found a proper dereference.  Insert a phi node if this
	 is the first load transformation.  */
      if (!phi_inserted)
	{
	  res = phiprop_insert_phi (bb, phi, use_stmt, phivn, n);
	  type = TREE_TYPE (res);

	  /* Remember the value we created for *ptr.  */
	  phivn[SSA_NAME_VERSION (ptr)].value = res;
	  phivn[SSA_NAME_VERSION (ptr)].vuse = vuse;

	  /* Remove old stmt.  The phi is taken care of by DCE, if we
	     want to delete it here we also have to delete all intermediate
	     copies.  */
	  gsi = gsi_for_stmt (use_stmt);
	  gsi_remove (&gsi, true);

	  phi_inserted = true;
	}
      else
	{
	  /* Further replacements are easy, just make a copy out of the
	     load.  */
	  gimple_assign_set_rhs1 (use_stmt, res);
	  update_stmt (use_stmt);
	}

next:;
      /* Continue searching for a proper dereference.  */
    }

  return phi_inserted;
}
363 
364 /* Main entry for phiprop pass.  */
365 
366 namespace {
367 
/* Pass metadata.  Requires CFG and SSA; always schedules an
   incremental SSA update at finish — presumably to provide virtual
   operands for the loads inserted on edges (TODO confirm).  */

const pass_data pass_data_phiprop =
{
  GIMPLE_PASS, /* type */
  "phiprop", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_PHIPROP, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};
380 
/* The phiprop pass object; gated on -ftree-phiprop.  */

class pass_phiprop : public gimple_opt_pass
{
public:
  pass_phiprop (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_phiprop, ctxt)
  {}

  /* opt_pass methods: */
  /* Run only when the flag enabling this transformation is set.  */
  virtual bool gate (function *) { return flag_tree_phiprop; }
  virtual unsigned int execute (function *);

}; // class pass_phiprop
393 
394 unsigned int
395 pass_phiprop::execute (function *fun)
396 {
397   vec<basic_block> bbs;
398   struct phiprop_d *phivn;
399   bool did_something = false;
400   basic_block bb;
401   gphi_iterator gsi;
402   unsigned i;
403   size_t n;
404 
405   calculate_dominance_info (CDI_DOMINATORS);
406   calculate_dominance_info (CDI_POST_DOMINATORS);
407 
408   n = num_ssa_names;
409   phivn = XCNEWVEC (struct phiprop_d, n);
410 
411   /* Walk the dominator tree in preorder.  */
412   bbs = get_all_dominated_blocks (CDI_DOMINATORS,
413 				  single_succ (ENTRY_BLOCK_PTR_FOR_FN (fun)));
414   FOR_EACH_VEC_ELT (bbs, i, bb)
415     for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
416       did_something |= propagate_with_phi (bb, gsi.phi (), phivn, n);
417 
418   if (did_something)
419     gsi_commit_edge_inserts ();
420 
421   bbs.release ();
422   free (phivn);
423 
424   free_dominance_info (CDI_POST_DOMINATORS);
425 
426   return 0;
427 }
428 
429 } // anon namespace
430 
/* Factory used by the pass manager to create an instance of the
   phiprop pass; the caller owns the returned object.  */

gimple_opt_pass *
make_pass_phiprop (gcc::context *ctxt)
{
  return new pass_phiprop (ctxt);
}
436