/* High-level loop manipulation functions.
   Copyright (C) 2004-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "cfghooks.h"
#include "tree-pass.h"	/* ??? for TODO_update_ssa but this isn't a pass.  */
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "cfganal.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-ivopts.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop-niter.h"
#include "tree-ssa-loop.h"
#include "tree-into-ssa.h"
#include "tree-ssa.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"
#include "params.h"
#include "tree-inline.h"

/* All bitmaps for rewriting into loop-closed SSA go on this obstack,
   so that we can free them all at once.  */
static bitmap_obstack loop_renamer_obstack;

/* Creates an induction variable with value BASE + STEP * iteration in LOOP.
   It is expected that neither BASE nor STEP are shared with other expressions
   (unless the sharing rules allow this).  Use VAR as a base var_decl for it
   (if NULL, a new temporary will be created).  The increment will occur at
   INCR_POS (after it if AFTER is true, before it otherwise).  INCR_POS and
   AFTER can be computed using standard_iv_increment_position.  The ssa versions
   of the variable before and after increment will be stored in VAR_BEFORE and
   VAR_AFTER (unless they are NULL).  */

void
create_iv (tree base, tree step, tree var, struct loop *loop,
	   gimple_stmt_iterator *incr_pos, bool after,
	   tree *var_before, tree *var_after)
{
  gassign *stmt;
  gphi *phi;
  tree initial, step1;
  gimple_seq stmts;
  tree vb, va;
  enum tree_code incr_op = PLUS_EXPR;
  edge pe = loop_preheader_edge (loop);

  if (var != NULL_TREE)
    {
      vb = make_ssa_name (var);
      va = make_ssa_name (var);
    }
  else
    {
      vb = make_temp_ssa_name (TREE_TYPE (base), NULL, "ivtmp");
      va = make_temp_ssa_name (TREE_TYPE (base), NULL, "ivtmp");
    }
  if (var_before)
    *var_before = vb;
  if (var_after)
    *var_after = va;

  /* For easier readability of the created code, produce MINUS_EXPRs
     when suitable.  */
  if (TREE_CODE (step) == INTEGER_CST)
    {
      if (TYPE_UNSIGNED (TREE_TYPE (step)))
	{
	  step1 = fold_build1 (NEGATE_EXPR, TREE_TYPE (step), step);
	  if (tree_int_cst_lt (step1, step))
	    {
	      incr_op = MINUS_EXPR;
	      step = step1;
	    }
	}
      else
	{
	  bool ovf;

	  if (!tree_expr_nonnegative_warnv_p (step, &ovf)
	      && may_negate_without_overflow_p (step))
	    {
	      incr_op = MINUS_EXPR;
	      step = fold_build1 (NEGATE_EXPR, TREE_TYPE (step), step);
	    }
	}
    }
  if (POINTER_TYPE_P (TREE_TYPE (base)))
    {
      if (TREE_CODE (base) == ADDR_EXPR)
	mark_addressable (TREE_OPERAND (base, 0));
      step = convert_to_ptrofftype (step);
      if (incr_op == MINUS_EXPR)
	step = fold_build1 (NEGATE_EXPR, TREE_TYPE (step), step);
      incr_op = POINTER_PLUS_EXPR;
    }
  /* Gimplify the step if necessary.  We put the computations in front of the
     loop (i.e. the step should be loop invariant).  */
  step = force_gimple_operand (step, &stmts, true, NULL_TREE);
  if (stmts)
    gsi_insert_seq_on_edge_immediate (pe, stmts);

  stmt = gimple_build_assign (va, incr_op, vb, step);
  if (after)
    gsi_insert_after (incr_pos, stmt, GSI_NEW_STMT);
  else
    gsi_insert_before (incr_pos, stmt, GSI_NEW_STMT);

  initial = force_gimple_operand (base, &stmts, true, var);
  if (stmts)
    gsi_insert_seq_on_edge_immediate (pe, stmts);

  phi = create_phi_node (vb, loop->header);
  add_phi_arg (phi, initial, loop_preheader_edge (loop), UNKNOWN_LOCATION);
  add_phi_arg (phi, va, loop_latch_edge (loop), UNKNOWN_LOCATION);
}
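
/* A minimal usage sketch (not part of the original file; LOOP, VAR_BEFORE
   and VAR_AFTER are hypothetical caller variables): create the counter
   0, 1, 2, ... in LOOP, incremented at the standard position.

     gimple_stmt_iterator incr_pos;
     bool after;
     tree var_before, var_after;

     standard_iv_increment_position (loop, &incr_pos, &after);
     create_iv (build_int_cst (unsigned_type_node, 0),
		build_int_cst (unsigned_type_node, 1),
		NULL_TREE, loop, &incr_pos, after,
		&var_before, &var_after);

   VAR_BEFORE then holds the counter value at the start of each iteration
   and VAR_AFTER the value after the increment; the two are tied together
   by the PHI node created in LOOP->header.  */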

/* Return the superloop of USE_LOOP that, in the loop tree, is a sibling
   of a superloop of DEF_LOOP; i.e. the child, on USE_LOOP's side, of the
   nearest common ancestor of USE_LOOP and DEF_LOOP (a loop counts as a
   superloop of itself here).  */

static inline struct loop *
find_sibling_superloop (struct loop *use_loop, struct loop *def_loop)
{
  unsigned ud = loop_depth (use_loop);
  unsigned dd = loop_depth (def_loop);
  gcc_assert (ud > 0 && dd > 0);
  if (ud > dd)
    use_loop = superloop_at_depth (use_loop, dd);
  if (ud < dd)
    def_loop = superloop_at_depth (def_loop, ud);
  while (loop_outer (use_loop) != loop_outer (def_loop))
    {
      use_loop = loop_outer (use_loop);
      def_loop = loop_outer (def_loop);
      gcc_assert (use_loop && def_loop);
    }
  return use_loop;
}
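
/* For instance, in the loop tree

     loop 1		(depth 1)
       loop 2		(depth 2)
     loop 3		(depth 1)
       loop 4		(depth 2)

   find_sibling_superloop (loop 4, loop 2) ascends both arguments until
   their loop fathers coincide (here: the tree root) and returns loop 3,
   the ancestor of the use's loop that is a sibling of an ancestor of the
   def's loop.  (Example added for illustration; the loop numbers are
   hypothetical.)  */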

/* DEF_BB is a basic block containing a DEF that needs rewriting into
   loop-closed SSA form.  USE_BLOCKS is the set of basic blocks containing
   uses of DEF that "escape" from the loop containing DEF_BB (i.e. blocks in
   USE_BLOCKS are dominated by DEF_BB but not in the loop father of DEF_BB).
   LOOP_EXITS[I] is the set of all basic blocks that exit loop I.

   Compute the subset of LOOP_EXITS that exit the loop containing DEF_BB
   or one of its loop fathers, in which DEF is live.  This set is returned
   in the bitmap LIVE_EXITS.

   Instead of computing the complete livein set of the def, we use the loop
   nesting tree as a form of poor man's structure analysis.  This greatly
   speeds up the analysis, which is important because this function may be
   called on all SSA names that need rewriting, one at a time.  */

static void
compute_live_loop_exits (bitmap live_exits, bitmap use_blocks,
			 bitmap *loop_exits, basic_block def_bb)
{
  unsigned i;
  bitmap_iterator bi;
  struct loop *def_loop = def_bb->loop_father;
  unsigned def_loop_depth = loop_depth (def_loop);
  bitmap def_loop_exits;

  /* Normally the work list size is bounded by the number of basic
     blocks in the largest loop.  We don't know this number, but we
     can be fairly sure that it will be relatively small.  */
  auto_vec<basic_block> worklist (MAX (8, n_basic_blocks_for_fn (cfun) / 128));

  EXECUTE_IF_SET_IN_BITMAP (use_blocks, 0, i, bi)
    {
      basic_block use_bb = BASIC_BLOCK_FOR_FN (cfun, i);
      struct loop *use_loop = use_bb->loop_father;
      gcc_checking_assert (def_loop != use_loop
			   && ! flow_loop_nested_p (def_loop, use_loop));
      if (! flow_loop_nested_p (use_loop, def_loop))
	use_bb = find_sibling_superloop (use_loop, def_loop)->header;
      if (bitmap_set_bit (live_exits, use_bb->index))
	worklist.safe_push (use_bb);
    }

  /* Iterate until the worklist is empty.  */
  while (! worklist.is_empty ())
    {
      edge e;
      edge_iterator ei;

      /* Pull a block off the worklist.  */
      basic_block bb = worklist.pop ();

      /* Make sure we have at least enough room in the work list
	 for all predecessors of this block.  */
      worklist.reserve (EDGE_COUNT (bb->preds));

      /* For each predecessor block.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  basic_block pred = e->src;
	  struct loop *pred_loop = pred->loop_father;
	  unsigned pred_loop_depth = loop_depth (pred_loop);
	  bool pred_visited;

	  /* We should have met DEF_BB along the way.  */
	  gcc_assert (pred != ENTRY_BLOCK_PTR_FOR_FN (cfun));

	  if (pred_loop_depth >= def_loop_depth)
	    {
	      if (pred_loop_depth > def_loop_depth)
		pred_loop = superloop_at_depth (pred_loop, def_loop_depth);
	      /* If we've reached DEF_LOOP, our train ends here.  */
	      if (pred_loop == def_loop)
		continue;
	    }
	  else if (! flow_loop_nested_p (pred_loop, def_loop))
	    pred = find_sibling_superloop (pred_loop, def_loop)->header;

	  /* Add PRED to the LIVEIN set.  PRED_VISITED is true if
	     we had already added PRED to LIVEIN before.  */
	  pred_visited = !bitmap_set_bit (live_exits, pred->index);

	  /* If we have visited PRED before, don't add it to the worklist.
	     If BB dominates PRED, then we're probably looking at a loop.
	     We're only interested in looking up in the dominance tree
	     because DEF_BB dominates all the uses.  */
	  if (pred_visited || dominated_by_p (CDI_DOMINATORS, pred, bb))
	    continue;

	  worklist.quick_push (pred);
	}
    }

  def_loop_exits = BITMAP_ALLOC (&loop_renamer_obstack);
  for (struct loop *loop = def_loop;
       loop != current_loops->tree_root;
       loop = loop_outer (loop))
    bitmap_ior_into (def_loop_exits, loop_exits[loop->num]);
  bitmap_and_into (live_exits, def_loop_exits);
  BITMAP_FREE (def_loop_exits);
}

/* Add a loop-closing PHI for VAR in basic block EXIT.  */

static void
add_exit_phi (basic_block exit, tree var)
{
  gphi *phi;
  edge e;
  edge_iterator ei;

  /* Check that at least one of the edges entering the EXIT block exits
     the loop, or a superloop of that loop, that VAR is defined in.  */
  if (flag_checking)
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (var);
      basic_block def_bb = gimple_bb (def_stmt);
      FOR_EACH_EDGE (e, ei, exit->preds)
	{
	  struct loop *aloop = find_common_loop (def_bb->loop_father,
						 e->src->loop_father);
	  if (!flow_bb_inside_loop_p (aloop, e->dest))
	    break;
	}
      gcc_assert (e);
    }

  phi = create_phi_node (NULL_TREE, exit);
  create_new_def_for (var, phi, gimple_phi_result_ptr (phi));
  FOR_EACH_EDGE (e, ei, exit->preds)
    add_phi_arg (phi, var, e, UNKNOWN_LOCATION);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, ";; Created LCSSA PHI: ");
      print_gimple_stmt (dump_file, phi, 0, dump_flags);
    }
}
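
/* Illustrative effect (a GIMPLE sketch; the SSA names are hypothetical):
   if i_5 is defined in a loop and EXIT is the single-predecessor target
   of its exit edge E, add_exit_phi (EXIT, i_5) creates

     i_7 = PHI <i_5(E)>

   in EXIT and registers i_7 as a new definition for i_5, so that the
   subsequent update_ssa call rewrites the uses of i_5 outside the loop
   to i_7.  */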

/* Add exit phis for VAR that is used in the basic blocks in USE_BLOCKS.
   Exits of the loops are stored in LOOP_EXITS.  */

static void
add_exit_phis_var (tree var, bitmap use_blocks, bitmap *loop_exits)
{
  unsigned index;
  bitmap_iterator bi;
  basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (var));
  bitmap live_exits = BITMAP_ALLOC (&loop_renamer_obstack);

  gcc_checking_assert (! bitmap_bit_p (use_blocks, def_bb->index));

  compute_live_loop_exits (live_exits, use_blocks, loop_exits, def_bb);

  EXECUTE_IF_SET_IN_BITMAP (live_exits, 0, index, bi)
    {
      add_exit_phi (BASIC_BLOCK_FOR_FN (cfun, index), var);
    }

  BITMAP_FREE (live_exits);
}

/* Add exit phis for the names marked in NAMES_TO_RENAME.
   Exits of the loops are stored in LOOP_EXITS.  Sets of blocks where the
   ssa names are used are stored in USE_BLOCKS.  */

static void
add_exit_phis (bitmap names_to_rename, bitmap *use_blocks, bitmap *loop_exits)
{
  unsigned i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (names_to_rename, 0, i, bi)
    {
      add_exit_phis_var (ssa_name (i), use_blocks[i], loop_exits);
    }
}

/* Fill the array of bitmaps LOOP_EXITS with all loop exit edge targets.  */

static void
get_loops_exits (bitmap *loop_exits)
{
  struct loop *loop;
  unsigned j;
  edge e;

  FOR_EACH_LOOP (loop, 0)
    {
      vec<edge> exit_edges = get_loop_exit_edges (loop);
      loop_exits[loop->num] = BITMAP_ALLOC (&loop_renamer_obstack);
      FOR_EACH_VEC_ELT (exit_edges, j, e)
	bitmap_set_bit (loop_exits[loop->num], e->dest->index);
      exit_edges.release ();
    }
}

/* For USE in BB, if it is used outside of the loop it is defined in,
   mark it for rewrite.  Record basic block BB, in which it is used, in
   USE_BLOCKS, and record the index of the SSA name in the NEED_PHIS bitmap.
   Note that for USEs in phis, BB should be the src of the edge corresponding
   to the use, rather than the bb containing the phi.  */

static void
find_uses_to_rename_use (basic_block bb, tree use, bitmap *use_blocks,
			 bitmap need_phis)
{
  unsigned ver;
  basic_block def_bb;
  struct loop *def_loop;

  if (TREE_CODE (use) != SSA_NAME)
    return;

  ver = SSA_NAME_VERSION (use);
  def_bb = gimple_bb (SSA_NAME_DEF_STMT (use));
  if (!def_bb)
    return;
  def_loop = def_bb->loop_father;

  /* If the definition is not inside a loop, it is not interesting.  */
  if (!loop_outer (def_loop))
    return;

  /* If the use is not outside of the loop it is defined in, it is not
     interesting.  */
  if (flow_bb_inside_loop_p (def_loop, bb))
    return;

  /* If we're seeing VER for the first time, we still have to allocate
     a bitmap for its uses.  */
  if (bitmap_set_bit (need_phis, ver))
    use_blocks[ver] = BITMAP_ALLOC (&loop_renamer_obstack);
  bitmap_set_bit (use_blocks[ver], bb->index);
}

/* For uses matching USE_FLAGS in STMT, mark names that are used outside of
   the loop they are defined in for rewrite.  Record the set of blocks in
   which the ssa names are used to USE_BLOCKS, and the ssa names themselves
   to NEED_PHIS.  */

static void
find_uses_to_rename_stmt (gimple *stmt, bitmap *use_blocks, bitmap need_phis,
			  int use_flags)
{
  ssa_op_iter iter;
  tree var;
  basic_block bb = gimple_bb (stmt);

  if (is_gimple_debug (stmt))
    return;

  /* The FOR_EACH_SSA_TREE_OPERAND iterator does not allow
     SSA_OP_VIRTUAL_USES only.  */
  if (use_flags == SSA_OP_VIRTUAL_USES)
    {
      tree vuse = gimple_vuse (stmt);
      if (vuse != NULL_TREE)
	find_uses_to_rename_use (bb, gimple_vuse (stmt), use_blocks, need_phis);
    }
  else
    FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, use_flags)
      find_uses_to_rename_use (bb, var, use_blocks, need_phis);
}

/* Marks names matching USE_FLAGS that are used in BB and outside of the loop
   they are defined in for rewrite.  Records the set of blocks in which the ssa
   names are used to USE_BLOCKS.  Records the SSA names that will
   need exit PHIs in NEED_PHIS.  */

static void
find_uses_to_rename_bb (basic_block bb, bitmap *use_blocks, bitmap need_phis,
			int use_flags)
{
  edge e;
  edge_iterator ei;
  bool do_virtuals = (use_flags & SSA_OP_VIRTUAL_USES) != 0;
  bool do_nonvirtuals = (use_flags & SSA_OP_USE) != 0;

  FOR_EACH_EDGE (e, ei, bb->succs)
    for (gphi_iterator bsi = gsi_start_phis (e->dest); !gsi_end_p (bsi);
	 gsi_next (&bsi))
      {
	gphi *phi = bsi.phi ();
	bool virtual_p = virtual_operand_p (gimple_phi_result (phi));
	if ((virtual_p && do_virtuals)
	    || (!virtual_p && do_nonvirtuals))
	  find_uses_to_rename_use (bb, PHI_ARG_DEF_FROM_EDGE (phi, e),
				   use_blocks, need_phis);
      }

  for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi);
       gsi_next (&bsi))
    find_uses_to_rename_stmt (gsi_stmt (bsi), use_blocks, need_phis,
			      use_flags);
}

/* Marks names matching USE_FLAGS that are used outside of the loop they are
   defined in for rewrite.  Records the set of blocks in which the ssa names
   are used to USE_BLOCKS.  Records the SSA names that will need exit PHIs in
   NEED_PHIS.  If CHANGED_BBS is not NULL, scan only blocks in this set.  */

static void
find_uses_to_rename (bitmap changed_bbs, bitmap *use_blocks, bitmap need_phis,
		     int use_flags)
{
  basic_block bb;
  unsigned index;
  bitmap_iterator bi;

  if (changed_bbs)
    EXECUTE_IF_SET_IN_BITMAP (changed_bbs, 0, index, bi)
      {
	bb = BASIC_BLOCK_FOR_FN (cfun, index);
	if (bb)
	  find_uses_to_rename_bb (bb, use_blocks, need_phis, use_flags);
      }
  else
    FOR_EACH_BB_FN (bb, cfun)
      find_uses_to_rename_bb (bb, use_blocks, need_phis, use_flags);
}

/* Mark uses of DEF that are used outside of the loop they are defined in for
   rewrite.  Record the set of blocks in which the ssa names are used to
   USE_BLOCKS.  Record the SSA names that will need exit PHIs in NEED_PHIS.  */

static void
find_uses_to_rename_def (tree def, bitmap *use_blocks, bitmap need_phis)
{
  gimple *use_stmt;
  imm_use_iterator imm_iter;

  FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, def)
    {
      if (is_gimple_debug (use_stmt))
	continue;

      basic_block use_bb = gimple_bb (use_stmt);

      use_operand_p use_p;
      FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
	{
	  if (gimple_code (use_stmt) == GIMPLE_PHI)
	    {
	      edge e = gimple_phi_arg_edge (as_a <gphi *> (use_stmt),
					    PHI_ARG_INDEX_FROM_USE (use_p));
	      use_bb = e->src;
	    }
	  find_uses_to_rename_use (use_bb, USE_FROM_PTR (use_p), use_blocks,
				   need_phis);
	}
    }
}

/* Marks names matching USE_FLAGS that are defined in LOOP and used outside of
   it for rewrite.  Records the set of blocks in which the ssa names are used
   to USE_BLOCKS.  Records the SSA names that will need exit PHIs in
   NEED_PHIS.  */

static void
find_uses_to_rename_in_loop (struct loop *loop, bitmap *use_blocks,
			     bitmap need_phis, int use_flags)
{
  bool do_virtuals = (use_flags & SSA_OP_VIRTUAL_USES) != 0;
  bool do_nonvirtuals = (use_flags & SSA_OP_USE) != 0;
  int def_flags = ((do_virtuals ? SSA_OP_VIRTUAL_DEFS : 0)
		   | (do_nonvirtuals ? SSA_OP_DEF : 0));

  basic_block *bbs = get_loop_body (loop);

  for (unsigned int i = 0; i < loop->num_nodes; i++)
    {
      basic_block bb = bbs[i];

      for (gphi_iterator bsi = gsi_start_phis (bb); !gsi_end_p (bsi);
	   gsi_next (&bsi))
	{
	  gphi *phi = bsi.phi ();
	  tree res = gimple_phi_result (phi);
	  bool virtual_p = virtual_operand_p (res);
	  if ((virtual_p && do_virtuals)
	      || (!virtual_p && do_nonvirtuals))
	    find_uses_to_rename_def (res, use_blocks, need_phis);
	}

      for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi);
	   gsi_next (&bsi))
	{
	  gimple *stmt = gsi_stmt (bsi);
	  /* The FOR_EACH_SSA_TREE_OPERAND iterator does not allow
	     SSA_OP_VIRTUAL_DEFS only.  */
	  if (def_flags == SSA_OP_VIRTUAL_DEFS)
	    {
	      tree vdef = gimple_vdef (stmt);
	      if (vdef != NULL)
		find_uses_to_rename_def (vdef, use_blocks, need_phis);
	    }
	  else
	    {
	      tree var;
	      ssa_op_iter iter;
	      FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, def_flags)
		find_uses_to_rename_def (var, use_blocks, need_phis);
	    }
	}
    }

  XDELETEVEC (bbs);
}

/* Rewrites the program into a loop closed ssa form -- i.e. inserts extra
   phi nodes to ensure that no variable is used outside the loop it is
   defined in.

   This strengthening of the basic ssa form has several advantages:

   1) Updating it during unrolling/peeling/versioning is trivial, since
      we do not need to care about the uses outside of the loop.
      The same applies to virtual operands which are also rewritten into
      loop closed SSA form.  Note that virtual operands are always live
      until function exit.
   2) The behavior of all uses of an induction variable is the same.
      Without this, you need to distinguish the case when the variable
      is used outside of the loop it is defined in, for example

      for (i = 0; i < 100; i++)
	{
	  for (j = 0; j < 100; j++)
	    {
	      k = i + j;
	      use1 (k);
	    }
	  use2 (k);
	}

      Looking from the outer loop with the normal SSA form, the first use of k
      is not well-behaved, while the second one is an induction variable with
      base 99 and step 1.

   If LOOP is non-null, only rewrite uses that have defs in LOOP.  Otherwise,
   if CHANGED_BBS is not NULL, we look for uses outside loops only in the
   basic blocks in this set.

   USE_FLAGS allows us to specify whether we want virtual, non-virtual or
   both variables rewritten.

   UPDATE_FLAG is used in the call to update_ssa.  See
   TODO_update_ssa* for documentation.  */

void
rewrite_into_loop_closed_ssa_1 (bitmap changed_bbs, unsigned update_flag,
				int use_flags, struct loop *loop)
{
  bitmap *use_blocks;
  bitmap names_to_rename;

  loops_state_set (LOOP_CLOSED_SSA);
  if (number_of_loops (cfun) <= 1)
    return;

  /* If the pass has caused the SSA form to be out-of-date, update it
     now.  */
  if (update_flag != 0)
    update_ssa (update_flag);
  else if (flag_checking)
    verify_ssa (true, true);

  bitmap_obstack_initialize (&loop_renamer_obstack);

  names_to_rename = BITMAP_ALLOC (&loop_renamer_obstack);

  /* Uses of names to rename.  We don't have to initialize this array,
     because we know that we will only have entries for the SSA names
     in NAMES_TO_RENAME.  */
  use_blocks = XNEWVEC (bitmap, num_ssa_names);

  if (loop != NULL)
    {
      gcc_assert (changed_bbs == NULL);
      find_uses_to_rename_in_loop (loop, use_blocks, names_to_rename,
				   use_flags);
    }
  else
    {
      gcc_assert (loop == NULL);
      find_uses_to_rename (changed_bbs, use_blocks, names_to_rename, use_flags);
    }

  if (!bitmap_empty_p (names_to_rename))
    {
      /* An array of bitmaps where LOOP_EXITS[I] is the set of basic blocks
	 that are the destination of an edge exiting loop number I.  */
      bitmap *loop_exits = XNEWVEC (bitmap, number_of_loops (cfun));
      get_loops_exits (loop_exits);

      /* Add the PHI nodes on exits of the loops for the names we need to
	 rewrite.  */
      add_exit_phis (names_to_rename, use_blocks, loop_exits);

      free (loop_exits);

      /* Fix up all the names found to be used outside their original
	 loops.  */
      update_ssa (TODO_update_ssa);
    }

  bitmap_obstack_release (&loop_renamer_obstack);
  free (use_blocks);
}

/* Rewrites the non-virtual defs and uses into a loop closed ssa form.  If
   CHANGED_BBS is not NULL, we look for uses outside loops only in the basic
   blocks in this set.  UPDATE_FLAG is used in the call to update_ssa.  See
   TODO_update_ssa* for documentation.  */

void
rewrite_into_loop_closed_ssa (bitmap changed_bbs, unsigned update_flag)
{
  rewrite_into_loop_closed_ssa_1 (changed_bbs, update_flag, SSA_OP_USE, NULL);
}
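
/* For the nested-loop example in the comment above
   rewrite_into_loop_closed_ssa_1, the rewrite roughly yields (a GIMPLE
   sketch; the SSA names are hypothetical):

     for (i = 0; i < 100; i++)
       {
	 for (j = 0; j < 100; j++)
	   {
	     k_1 = i + j;
	     use1 (k_1);
	   }
	 k_2 = PHI <k_1(exit of inner loop)>;
	 use2 (k_2);
       }

   so use2 no longer refers directly to a name defined in the inner
   loop.  */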

/* Rewrites virtual defs and uses with def in LOOP into loop closed ssa
   form.  */

void
rewrite_virtuals_into_loop_closed_ssa (struct loop *loop)
{
  rewrite_into_loop_closed_ssa_1 (NULL, 0, SSA_OP_VIRTUAL_USES, loop);
}

/* Check invariants of the loop closed ssa form for the USE in BB.  */

static void
check_loop_closed_ssa_use (basic_block bb, tree use)
{
  gimple *def;
  basic_block def_bb;

  if (TREE_CODE (use) != SSA_NAME || virtual_operand_p (use))
    return;

  def = SSA_NAME_DEF_STMT (use);
  def_bb = gimple_bb (def);
  gcc_assert (!def_bb
	      || flow_bb_inside_loop_p (def_bb->loop_father, bb));
}

/* Checks invariants of loop closed ssa form in statement STMT in BB.  */

static void
check_loop_closed_ssa_stmt (basic_block bb, gimple *stmt)
{
  ssa_op_iter iter;
  tree var;

  if (is_gimple_debug (stmt))
    return;

  FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_USE)
    check_loop_closed_ssa_use (bb, var);
}

/* Checks that invariants of the loop closed ssa form are preserved.
   Call verify_ssa when VERIFY_SSA_P is true.  */

DEBUG_FUNCTION void
verify_loop_closed_ssa (bool verify_ssa_p)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  if (number_of_loops (cfun) <= 1)
    return;

  if (verify_ssa_p)
    verify_ssa (false, true);

  timevar_push (TV_VERIFY_LOOP_CLOSED);

  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gphi_iterator bsi = gsi_start_phis (bb); !gsi_end_p (bsi);
	   gsi_next (&bsi))
	{
	  gphi *phi = bsi.phi ();
	  FOR_EACH_EDGE (e, ei, bb->preds)
	    check_loop_closed_ssa_use (e->src,
				       PHI_ARG_DEF_FROM_EDGE (phi, e));
	}

      for (gimple_stmt_iterator bsi = gsi_start_bb (bb); !gsi_end_p (bsi);
	   gsi_next (&bsi))
	check_loop_closed_ssa_stmt (bb, gsi_stmt (bsi));
    }

  timevar_pop (TV_VERIFY_LOOP_CLOSED);
}

/* Split the loop exit edge EXIT.  Things are complicated a bit by the need
   to preserve the loop closed ssa form.  The newly created block is
   returned.  */

basic_block
split_loop_exit_edge (edge exit)
{
  basic_block dest = exit->dest;
  basic_block bb = split_edge (exit);
  gphi *phi, *new_phi;
  tree new_name, name;
  use_operand_p op_p;
  gphi_iterator psi;
  source_location locus;

  for (psi = gsi_start_phis (dest); !gsi_end_p (psi); gsi_next (&psi))
    {
      phi = psi.phi ();
      op_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, single_succ_edge (bb));
      locus = gimple_phi_arg_location_from_edge (phi, single_succ_edge (bb));

      name = USE_FROM_PTR (op_p);

      /* If the argument of the PHI node is a constant, we do not need
	 to keep it inside loop.  */
      if (TREE_CODE (name) != SSA_NAME)
	continue;

      /* Otherwise create an auxiliary phi node that will copy the value
	 of the SSA name out of the loop.  */
      new_name = duplicate_ssa_name (name, NULL);
      new_phi = create_phi_node (new_name, bb);
      add_phi_arg (new_phi, name, exit, locus);
      SET_USE (op_p, new_name);
    }

  return bb;
}
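
/* CFG sketch of the effect (the block and SSA names are hypothetical):
   splitting the exit edge L -> D of a loop turns

     L  --->  D:  x_2 = PHI <x_1(L), ...>

   into

     L  --->  NEW:  x_3 = PHI <x_1(L)>  --->  D:  x_2 = PHI <x_3(NEW), ...>

   so the name x_1 defined inside the loop is referenced only by the
   loop-closing PHI in NEW, and the loop closed ssa form is preserved.  */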

/* Returns the basic block in which statements should be emitted for induction
   variables incremented at the end of the LOOP.  */

basic_block
ip_end_pos (struct loop *loop)
{
  return loop->latch;
}

/* Returns the basic block in which statements should be emitted for induction
   variables incremented just before the exit condition of a LOOP.  */

basic_block
ip_normal_pos (struct loop *loop)
{
  gimple *last;
  basic_block bb;
  edge exit;

  if (!single_pred_p (loop->latch))
    return NULL;

  bb = single_pred (loop->latch);
  last = last_stmt (bb);
  if (!last
      || gimple_code (last) != GIMPLE_COND)
    return NULL;

  exit = EDGE_SUCC (bb, 0);
  if (exit->dest == loop->latch)
    exit = EDGE_SUCC (bb, 1);

  if (flow_bb_inside_loop_p (loop, exit->dest))
    return NULL;

  return bb;
}

/* Stores the standard position for induction variable increment in LOOP
   (just before the exit condition if it is available and latch block is empty,
   end of the latch block otherwise) to BSI.  INSERT_AFTER is set to true if
   the increment should be inserted after *BSI.  */

void
standard_iv_increment_position (struct loop *loop, gimple_stmt_iterator *bsi,
				bool *insert_after)
{
  basic_block bb = ip_normal_pos (loop), latch = ip_end_pos (loop);
  gimple *last = last_stmt (latch);

  if (!bb
      || (last && gimple_code (last) != GIMPLE_LABEL))
    {
      *bsi = gsi_last_bb (latch);
      *insert_after = true;
    }
  else
    {
      *bsi = gsi_last_bb (bb);
      *insert_after = false;
    }
}

/* Copies phi node arguments for duplicated blocks.  The index of the first
   duplicated block is FIRST_NEW_BLOCK.  */

static void
copy_phi_node_args (unsigned first_new_block)
{
  unsigned i;

  for (i = first_new_block; i < (unsigned) last_basic_block_for_fn (cfun); i++)
    BASIC_BLOCK_FOR_FN (cfun, i)->flags |= BB_DUPLICATED;

  for (i = first_new_block; i < (unsigned) last_basic_block_for_fn (cfun); i++)
    add_phi_args_after_copy_bb (BASIC_BLOCK_FOR_FN (cfun, i));

  for (i = first_new_block; i < (unsigned) last_basic_block_for_fn (cfun); i++)
    BASIC_BLOCK_FOR_FN (cfun, i)->flags &= ~BB_DUPLICATED;
}


/* The same as cfgloopmanip.c:duplicate_loop_to_header_edge, but also
   updates the PHI nodes at start of the copied region.  In order to
   achieve this, only loops whose exits all lead to the same location
   are handled.

   Notice that we do not completely update the SSA web after
   duplication.  The caller is responsible for calling update_ssa
   after the loop has been duplicated.  */

bool
gimple_duplicate_loop_to_header_edge (struct loop *loop, edge e,
				    unsigned int ndupl, sbitmap wont_exit,
				    edge orig, vec<edge> *to_remove,
				    int flags)
{
  unsigned first_new_block;

  if (!loops_state_satisfies_p (LOOPS_HAVE_SIMPLE_LATCHES))
    return false;
  if (!loops_state_satisfies_p (LOOPS_HAVE_PREHEADERS))
    return false;

  first_new_block = last_basic_block_for_fn (cfun);
  if (!duplicate_loop_to_header_edge (loop, e, ndupl, wont_exit,
				      orig, to_remove, flags))
    return false;

  /* Readd the removed phi args for e.  */
  flush_pending_stmts (e);

  /* Copy the phi node arguments.  */
  copy_phi_node_args (first_new_block);

  scev_reset ();

  return true;
}

/* Returns true if we can unroll LOOP FACTOR times.  The number
   of iterations of the loop is returned in NITER.  */

bool
can_unroll_loop_p (struct loop *loop, unsigned factor,
		   struct tree_niter_desc *niter)
{
  edge exit;

  /* Check whether unrolling is possible.  We only want to unroll loops
     for which we are able to determine the number of iterations.  We also
     want to split the extra iterations of the loop from its end,
     therefore we require that the loop has precisely one
     exit.  */

  exit = single_dom_exit (loop);
  if (!exit)
    return false;

  if (!number_of_iterations_exit (loop, exit, niter, false)
      || niter->cmp == ERROR_MARK
      /* Scalar evolutions analysis might have copy propagated
	 the abnormal ssa names into these expressions, hence
	 emitting the computations based on them during loop
	 unrolling might create overlapping life ranges for
	 them, and failures in out-of-ssa.  */
      || contains_abnormal_ssa_name_p (niter->may_be_zero)
      || contains_abnormal_ssa_name_p (niter->control.base)
      || contains_abnormal_ssa_name_p (niter->control.step)
      || contains_abnormal_ssa_name_p (niter->bound))
    return false;

  /* And of course, we must be able to duplicate the loop.  */
  if (!can_duplicate_loop_p (loop))
    return false;

  /* The final loop should be small enough.  */
  if (tree_num_loop_insns (loop, &eni_size_weights) * factor
      > (unsigned) PARAM_VALUE (PARAM_MAX_UNROLLED_INSNS))
    return false;

  return true;
}
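
/* A sketch of a typical calling sequence (hypothetical caller; LOOP and
   FACTOR are assumptions supplied by it, and passing a NULL transform
   callback just unrolls the loop):

     struct tree_niter_desc desc;
     if (can_unroll_loop_p (loop, factor, &desc))
       tree_transform_and_unroll_loop (loop, factor,
				       single_dom_exit (loop), &desc,
				       NULL, NULL);
   */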

/* Determines the conditions that control execution of LOOP unrolled FACTOR
   times.  DESC describes the number of iterations of LOOP.  ENTER_COND is
   set to the condition that must be true if the main loop can be entered.
   EXIT_BASE, EXIT_STEP, EXIT_CMP and EXIT_BOUND are set to values describing
   how the exit from the unrolled loop should be controlled.  */

static void
determine_exit_conditions (struct loop *loop, struct tree_niter_desc *desc,
			   unsigned factor, tree *enter_cond,
			   tree *exit_base, tree *exit_step,
			   enum tree_code *exit_cmp, tree *exit_bound)
{
  gimple_seq stmts;
  tree base = desc->control.base;
  tree step = desc->control.step;
  tree bound = desc->bound;
  tree type = TREE_TYPE (step);
  tree bigstep, delta;
  tree min = lower_bound_in_type (type, type);
  tree max = upper_bound_in_type (type, type);
  enum tree_code cmp = desc->cmp;
  tree cond = boolean_true_node, assum;

  /* For pointers, do the arithmetic in the type of step.  */
  base = fold_convert (type, base);
  bound = fold_convert (type, bound);

  *enter_cond = boolean_false_node;
  *exit_base = NULL_TREE;
  *exit_step = NULL_TREE;
  *exit_cmp = ERROR_MARK;
  *exit_bound = NULL_TREE;
  gcc_assert (cmp != ERROR_MARK);

  /* We only need to be correct when we answer the question
     "Do at least FACTOR more iterations remain?" in the unrolled loop.
     Thus, transforming BASE + STEP * i <> BOUND to
     BASE + STEP * i < BOUND is ok.  */
  if (cmp == NE_EXPR)
    {
      if (tree_int_cst_sign_bit (step))
	cmp = GT_EXPR;
      else
	cmp = LT_EXPR;
    }
  else if (cmp == LT_EXPR)
    {
      gcc_assert (!tree_int_cst_sign_bit (step));
    }
  else if (cmp == GT_EXPR)
    {
      gcc_assert (tree_int_cst_sign_bit (step));
    }
  else
    gcc_unreachable ();

  /* The main body of the loop may be entered iff:

     1) desc->may_be_zero is false.
     2) it is possible to check that there are at least FACTOR iterations
	of the loop, i.e., BOUND - step * FACTOR does not overflow.
     3) # of iterations is at least FACTOR  */

  if (!integer_zerop (desc->may_be_zero))
    cond = fold_build2 (TRUTH_AND_EXPR, boolean_type_node,
			invert_truthvalue (desc->may_be_zero),
			cond);

  bigstep = fold_build2 (MULT_EXPR, type, step,
			 build_int_cst_type (type, factor));
  delta = fold_build2 (MINUS_EXPR, type, bigstep, step);
  if (cmp == LT_EXPR)
    assum = fold_build2 (GE_EXPR, boolean_type_node,
			 bound,
			 fold_build2 (PLUS_EXPR, type, min, delta));
  else
    assum = fold_build2 (LE_EXPR, boolean_type_node,
			 bound,
			 fold_build2 (PLUS_EXPR, type, max, delta));
  cond = fold_build2 (TRUTH_AND_EXPR, boolean_type_node, assum, cond);

  bound = fold_build2 (MINUS_EXPR, type, bound, delta);
  assum = fold_build2 (cmp, boolean_type_node, base, bound);
  cond = fold_build2 (TRUTH_AND_EXPR, boolean_type_node, assum, cond);

  cond = force_gimple_operand (unshare_expr (cond), &stmts, false, NULL_TREE);
  if (stmts)
    gsi_insert_seq_on_edge_immediate (loop_preheader_edge (loop), stmts);
  /* cond now may be a gimple comparison, which would be OK, but also any
     other gimple rhs (say a && b).  In this case we need to force it to be
     an operand.  */
  if (!is_gimple_condexpr (cond))
    {
      cond = force_gimple_operand (cond, &stmts, true, NULL_TREE);
      if (stmts)
	gsi_insert_seq_on_edge_immediate (loop_preheader_edge (loop), stmts);
    }
  *enter_cond = cond;

  base = force_gimple_operand (unshare_expr (base), &stmts, true, NULL_TREE);
  if (stmts)
    gsi_insert_seq_on_edge_immediate (loop_preheader_edge (loop), stmts);
  bound = force_gimple_operand (unshare_expr (bound), &stmts, true, NULL_TREE);
  if (stmts)
    gsi_insert_seq_on_edge_immediate (loop_preheader_edge (loop), stmts);

  *exit_base = base;
  *exit_step = bigstep;
  *exit_cmp = cmp;
  *exit_bound = bound;
}
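
/* Worked example (a sketch; assume a signed int IV, no MAY_BE_ZERO
   condition, and the loop for (i = 0; i < n; i++) unrolled with
   FACTOR == 4).  Then BIGSTEP == 4 and DELTA == 3, and the function
   computes

     *enter_cond:  n >= INT_MIN + 3  &&  0 < n - 3
		   (i.e. n - 3 cannot underflow and at least 4
		   iterations remain)
     *exit_base:   0
     *exit_step:   4
     *exit_cmp:    <
     *exit_bound:  n - 3

   so the unrolled body keeps iterating while i < n - 3, leaving at most
   FACTOR - 1 iterations for the epilogue loop.  */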

/* Scales the frequencies of all basic blocks in LOOP that are strictly
   dominated by BB by NUM/DEN.  */

static void
scale_dominated_blocks_in_loop (struct loop *loop, basic_block bb,
				int num, int den)
{
  basic_block son;

  if (den == 0)
    return;

  for (son = first_dom_son (CDI_DOMINATORS, bb);
       son;
       son = next_dom_son (CDI_DOMINATORS, son))
    {
      if (!flow_bb_inside_loop_p (loop, son))
	continue;
      scale_bbs_frequencies_int (&son, 1, num, den);
      scale_dominated_blocks_in_loop (loop, son, num, den);
    }
}

/* Return estimated niter for LOOP after unrolling by FACTOR times.  */

gcov_type
niter_for_unrolled_loop (struct loop *loop, unsigned factor)
{
  gcc_assert (factor != 0);
  bool profile_p = false;
  gcov_type est_niter = expected_loop_iterations_unbounded (loop, &profile_p);
  gcov_type new_est_niter = est_niter / factor;

  /* Without profile feedback, loops for which we do not know a better estimate
     are assumed to roll 10 times.  When we unroll such a loop, it appears to
     roll too little, and it may even seem to be cold.  To avoid this, we
     ensure that the created loop appears to roll at least 5 times (but at
     most as many times as before unrolling).  Don't do adjustment if profile
     feedback is present.  */
  if (new_est_niter < 5 && !profile_p)
    {
      if (est_niter < 5)
	new_est_niter = est_niter;
      else
	new_est_niter = 5;
    }

  return new_est_niter;
}
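
/* For instance, with the default assumption EST_NITER == 10 and
   FACTOR == 4, the plain quotient 10 / 4 == 2 would make the unrolled
   loop look nearly cold, so without profile feedback the result is
   bumped to 5; with profile feedback the quotient is returned as is.
   (Worked example added for illustration.)  */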

/* Unroll LOOP FACTOR times.  DESC describes the number of iterations of LOOP.
   EXIT is the exit of the loop to which DESC corresponds.

   If N is the number of iterations of the loop and MAY_BE_ZERO is the
   condition under which the loop exits in the first iteration even if N != 0,

   while (1)
     {
       x = phi (init, next);

       pre;
       if (st)
         break;
       post;
     }

   becomes (with possibly the exit conditions formulated a bit differently,
   avoiding the need to create a new iv):

   if (MAY_BE_ZERO || N < FACTOR)
     goto rest;

   do
     {
       x = phi (init, next);

       pre;
       post;
       pre;
       post;
       ...
       pre;
       post;
       N -= FACTOR;

     } while (N >= FACTOR);

   rest:
     init' = phi (init, x);

   while (1)
     {
       x = phi (init', next);

       pre;
       if (st)
         break;
       post;
     }

   Before the loop is unrolled, TRANSFORM is called for it (only for the
   unrolled loop, but not for its versioned copy).  DATA is passed to
   TRANSFORM.  */
1179*1debfc3dSmrg 
1180*1debfc3dSmrg /* Probability in % that the unrolled loop is entered.  Just a guess.  */
1181*1debfc3dSmrg #define PROB_UNROLLED_LOOP_ENTERED 90
1182*1debfc3dSmrg 
void
tree_transform_and_unroll_loop (struct loop *loop, unsigned factor,
				edge exit, struct tree_niter_desc *desc,
				transform_callback transform,
				void *data)
{
  gcond *exit_if;
  tree ctr_before, ctr_after;
  tree enter_main_cond, exit_base, exit_step, exit_bound;
  enum tree_code exit_cmp;
  gphi *phi_old_loop, *phi_new_loop, *phi_rest;
  gphi_iterator psi_old_loop, psi_new_loop;
  tree init, next, new_init;
  struct loop *new_loop;
  basic_block rest, exit_bb;
  edge old_entry, new_entry, old_latch, precond_edge, new_exit;
  edge new_nonexit, e;
  gimple_stmt_iterator bsi;
  use_operand_p op;
  bool ok;
  unsigned i, prob, prob_entry, scale_unrolled, scale_rest;
  gcov_type freq_e, freq_h;
  gcov_type new_est_niter = niter_for_unrolled_loop (loop, factor);
  unsigned irr = loop_preheader_edge (loop)->flags & EDGE_IRREDUCIBLE_LOOP;
  auto_vec<edge> to_remove;

  determine_exit_conditions (loop, desc, factor,
			     &enter_main_cond, &exit_base, &exit_step,
			     &exit_cmp, &exit_bound);

  /* Let us assume that the unrolled loop is quite likely to be entered.  */
  if (integer_nonzerop (enter_main_cond))
    prob_entry = REG_BR_PROB_BASE;
  else
    prob_entry = PROB_UNROLLED_LOOP_ENTERED * REG_BR_PROB_BASE / 100;
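
  /* Illustration (not in the original source): REG_BR_PROB_BASE is 10000,
     so the edge into the unrolled loop gets probability
     90 * 10000 / 100 == 9000, i.e. 90%, unless the entry condition is
     known to be always true, in which case it gets the full 10000.  */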

  /* The values for scales should keep profile consistent, and somewhat close
     to correct.

     TODO: The current value of SCALE_REST makes it appear that the loop that
     is created by splitting the remaining iterations of the unrolled loop is
     executed the same number of times as the original loop, and with the same
     frequencies, which is obviously wrong.  This does not appear to cause
     problems, so we do not bother with fixing it for now.  To make the profile
     correct, we would need to change the probability of the exit edge of the
     loop, and recompute the distribution of frequencies in its body because
     of this change (scale the frequencies of blocks before and after the exit
     by appropriate factors).  */
  scale_unrolled = prob_entry;
  scale_rest = REG_BR_PROB_BASE;

  new_loop = loop_version (loop, enter_main_cond, NULL,
			   prob_entry, REG_BR_PROB_BASE - prob_entry,
			   scale_unrolled, scale_rest, true);
  gcc_assert (new_loop != NULL);
  update_ssa (TODO_update_ssa);

  /* Prepare the cfg and update the phi nodes.  Move the loop exit to the
     loop latch (and make its condition dummy, for the moment).  */
  rest = loop_preheader_edge (new_loop)->src;
  precond_edge = single_pred_edge (rest);
  split_edge (loop_latch_edge (loop));
  exit_bb = single_pred (loop->latch);

  /* Since the exit edge will be removed, the frequency of all the blocks
     in the loop that are dominated by it must be scaled by
     1 / (1 - exit->probability).  */
  scale_dominated_blocks_in_loop (loop, exit->src,
				  REG_BR_PROB_BASE,
				  REG_BR_PROB_BASE - exit->probability);
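
  /* Illustration (not in the original source): if exit->probability is
     2000 (20% of REG_BR_PROB_BASE), the dominated blocks are scaled by
     10000 / (10000 - 2000), i.e. by 1.25, to make up for the executions
     that previously left through the removed exit.  */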

  bsi = gsi_last_bb (exit_bb);
  exit_if = gimple_build_cond (EQ_EXPR, integer_zero_node,
			       integer_zero_node,
			       NULL_TREE, NULL_TREE);

  gsi_insert_after (&bsi, exit_if, GSI_NEW_STMT);
  new_exit = make_edge (exit_bb, rest, EDGE_FALSE_VALUE | irr);
  rescan_loop_exit (new_exit, true, false);

  /* Set the probability of the new exit to that of the old one.  Fix
     the frequency of the latch block by scaling it back by
     1 - exit->probability.  */
  new_exit->count = exit->count;
  new_exit->probability = exit->probability;
  new_nonexit = single_pred_edge (loop->latch);
  new_nonexit->probability = REG_BR_PROB_BASE - exit->probability;
  new_nonexit->flags = EDGE_TRUE_VALUE;
  new_nonexit->count -= exit->count;
  if (new_nonexit->count < 0)
    new_nonexit->count = 0;
  scale_bbs_frequencies_int (&loop->latch, 1, new_nonexit->probability,
			     REG_BR_PROB_BASE);

  old_entry = loop_preheader_edge (loop);
  new_entry = loop_preheader_edge (new_loop);
  old_latch = loop_latch_edge (loop);
  for (psi_old_loop = gsi_start_phis (loop->header),
       psi_new_loop = gsi_start_phis (new_loop->header);
       !gsi_end_p (psi_old_loop);
       gsi_next (&psi_old_loop), gsi_next (&psi_new_loop))
    {
      phi_old_loop = psi_old_loop.phi ();
      phi_new_loop = psi_new_loop.phi ();

      init = PHI_ARG_DEF_FROM_EDGE (phi_old_loop, old_entry);
      op = PHI_ARG_DEF_PTR_FROM_EDGE (phi_new_loop, new_entry);
      gcc_assert (operand_equal_for_phi_arg_p (init, USE_FROM_PTR (op)));
      next = PHI_ARG_DEF_FROM_EDGE (phi_old_loop, old_latch);

      /* Prefer using the original variable as a base for the new ssa name.
	 This is necessary for virtual ops, and useful in order to avoid
	 losing debug info for real ops.  */
      if (TREE_CODE (next) == SSA_NAME
	  && useless_type_conversion_p (TREE_TYPE (next),
					TREE_TYPE (init)))
	new_init = copy_ssa_name (next);
      else if (TREE_CODE (init) == SSA_NAME
	       && useless_type_conversion_p (TREE_TYPE (init),
					     TREE_TYPE (next)))
	new_init = copy_ssa_name (init);
      else if (useless_type_conversion_p (TREE_TYPE (next), TREE_TYPE (init)))
	new_init = make_temp_ssa_name (TREE_TYPE (next), NULL, "unrinittmp");
      else
	new_init = make_temp_ssa_name (TREE_TYPE (init), NULL, "unrinittmp");

      phi_rest = create_phi_node (new_init, rest);

      add_phi_arg (phi_rest, init, precond_edge, UNKNOWN_LOCATION);
      add_phi_arg (phi_rest, next, new_exit, UNKNOWN_LOCATION);
      SET_USE (op, new_init);
    }

  remove_path (exit);

  /* Transform the loop.  */
  if (transform)
    (*transform) (loop, data);

  /* Unroll the loop and remove the exits in all iterations except for the
     last one.  */
  auto_sbitmap wont_exit (factor);
  bitmap_ones (wont_exit);
  bitmap_clear_bit (wont_exit, factor - 1);
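
  /* Illustration (not in the original source): with FACTOR == 4 the
     sbitmap is <1, 1, 1, 0>; the exit edge is removed from the first
     three copies of the body and kept only in the last one, which thus
     becomes the only copy that can leave the unrolled loop.  */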

  ok = gimple_duplicate_loop_to_header_edge
	  (loop, loop_latch_edge (loop), factor - 1,
	   wont_exit, new_exit, &to_remove, DLTHE_FLAG_UPDATE_FREQ);
  gcc_assert (ok);

  FOR_EACH_VEC_ELT (to_remove, i, e)
    {
      ok = remove_path (e);
      gcc_assert (ok);
    }
  update_ssa (TODO_update_ssa);

  /* Ensure that the frequencies in the loop match the new estimated
     number of iterations, and change the probability of the new
     exit edge.  */

  freq_h = loop->header->count;
  freq_e = (loop_preheader_edge (loop))->count;
  /* Use frequency only if counts are zero.  */
  if (freq_h == 0 && freq_e == 0)
    {
      freq_h = loop->header->frequency;
      freq_e = EDGE_FREQUENCY (loop_preheader_edge (loop));
    }
  if (freq_h != 0)
    {
      gcov_type scale;
      /* Avoid dropping loop body profile counter to 0 because of zero count
	 in loop's preheader.  */
      freq_e = MAX (freq_e, 1);
      /* This should not overflow.  */
      scale = GCOV_COMPUTE_SCALE (freq_e * (new_est_niter + 1), freq_h);
      scale_loop_frequencies (loop, scale, REG_BR_PROB_BASE);
    }

  exit_bb = single_pred (loop->latch);
  new_exit = find_edge (exit_bb, rest);
  new_exit->count = loop_preheader_edge (loop)->count;
  new_exit->probability = REG_BR_PROB_BASE / (new_est_niter + 1);
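
  /* Illustration (not in the original source): if the unrolled body is
     expected to iterate new_est_niter == 4 more times on average, the
     exit is taken on one of every five executions of EXIT_BB, so its
     probability becomes REG_BR_PROB_BASE / 5 == 2000, i.e. 20%.  */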

  rest->count += new_exit->count;
  rest->frequency += EDGE_FREQUENCY (new_exit);

  new_nonexit = single_pred_edge (loop->latch);
  prob = new_nonexit->probability;
  new_nonexit->probability = REG_BR_PROB_BASE - new_exit->probability;
  new_nonexit->count = exit_bb->count - new_exit->count;
  if (new_nonexit->count < 0)
    new_nonexit->count = 0;
  if (prob > 0)
    scale_bbs_frequencies_int (&loop->latch, 1, new_nonexit->probability,
			       prob);

  /* Finally create the new counter for the number of iterations and add
     the new exit instruction.  */
  bsi = gsi_last_nondebug_bb (exit_bb);
  exit_if = as_a <gcond *> (gsi_stmt (bsi));
  create_iv (exit_base, exit_step, NULL_TREE, loop,
	     &bsi, false, &ctr_before, &ctr_after);
  gimple_cond_set_code (exit_if, exit_cmp);
  gimple_cond_set_lhs (exit_if, ctr_after);
  gimple_cond_set_rhs (exit_if, exit_bound);
  update_stmt (exit_if);

  checking_verify_flow_info ();
  checking_verify_loop_structure ();
  checking_verify_loop_closed_ssa (true);
}

/* Wrapper over tree_transform_and_unroll_loop for the case when we do not
   want to transform the loop before unrolling.  The meaning of the
   arguments is the same as for tree_transform_and_unroll_loop.  */

void
tree_unroll_loop (struct loop *loop, unsigned factor,
		  edge exit, struct tree_niter_desc *desc)
{
  tree_transform_and_unroll_loop (loop, factor, exit, desc,
				  NULL, NULL);
}
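
/* A minimal usage sketch (an illustration added here, not part of the
   original file): how a client pass might unroll a countable loop four
   times.  It assumes LOOP has a single exit that dominates the latch and
   that the exit's iteration count is analyzable; the functions used are
   real APIs declared by the headers this file already includes.  Guarded
   out so that it is not compiled into the object file.  */
#if 0
static void
example_unroll_by_four (struct loop *loop)
{
  struct tree_niter_desc desc;
  edge exit = single_dom_exit (loop);

  /* Unroll only when the number of iterations can be analyzed; DESC then
     describes the iteration count for EXIT.  */
  if (exit && number_of_iterations_exit (loop, exit, &desc, false))
    tree_unroll_loop (loop, 4, exit, &desc);
}
#endif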

/* Rewrite the phi node at position PSI as a function of the main
   induction variable MAIN_IV and insert the generated code at GSI.  */

static void
rewrite_phi_with_iv (loop_p loop,
		     gphi_iterator *psi,
		     gimple_stmt_iterator *gsi,
		     tree main_iv)
{
  affine_iv iv;
  gassign *stmt;
  gphi *phi = psi->phi ();
  tree atype, mtype, val, res = PHI_RESULT (phi);

  if (virtual_operand_p (res) || res == main_iv)
    {
      gsi_next (psi);
      return;
    }

  if (!simple_iv (loop, loop, res, &iv, true))
    {
      gsi_next (psi);
      return;
    }

  remove_phi_node (psi, false);

  atype = TREE_TYPE (res);
  mtype = POINTER_TYPE_P (atype) ? sizetype : atype;
  val = fold_build2 (MULT_EXPR, mtype, unshare_expr (iv.step),
		     fold_convert (mtype, main_iv));
  val = fold_build2 (POINTER_TYPE_P (atype)
		     ? POINTER_PLUS_EXPR : PLUS_EXPR,
		     atype, unshare_expr (iv.base), val);
  val = force_gimple_operand_gsi (gsi, val, false, NULL_TREE, true,
				  GSI_SAME_STMT);
  stmt = gimple_build_assign (res, val);
  gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
}
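
/* Worked example (an illustration added here, not in the original
   source): for a pointer induction variable

     p_1 = PHI <p_0(preheader), p_2(latch)>,  with  p_2 = p_1 + 8,

   simple_iv determines { base = p_0, step = 8 }, so the PHI node is
   removed and replaced at GSI by the equivalent computation

     p_1 = p_0 + (sizetype) main_iv * 8;

   built with POINTER_PLUS_EXPR because the result type is a pointer;
   for integer IVs a plain PLUS_EXPR in the IV's type is used instead.  */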

/* Rewrite all the phi nodes of LOOP as functions of the main induction
   variable MAIN_IV.  */

static void
rewrite_all_phi_nodes_with_iv (loop_p loop, tree main_iv)
{
  unsigned i;
  basic_block *bbs = get_loop_body_in_dom_order (loop);
  gphi_iterator psi;

  for (i = 0; i < loop->num_nodes; i++)
    {
      basic_block bb = bbs[i];
      gimple_stmt_iterator gsi = gsi_after_labels (bb);

      if (bb->loop_father != loop)
	continue;

      for (psi = gsi_start_phis (bb); !gsi_end_p (psi); )
	rewrite_phi_with_iv (loop, &psi, &gsi, main_iv);
    }

  free (bbs);
}

/* Bases all the induction variables in LOOP on a single induction variable
   (with base 0 and step 1), whose final value is compared with *NIT.  When
   the IV type precision has to be larger than *NIT type precision, *NIT is
   converted to the larger type, the conversion code is inserted before the
   loop, and *NIT is updated to the new definition.  When BUMP_IN_LATCH is
   true, the induction variable is incremented in the loop latch, otherwise
   it is incremented in the loop header.  Return the induction variable that
   was created.  */

tree
canonicalize_loop_ivs (struct loop *loop, tree *nit, bool bump_in_latch)
{
  unsigned precision = TYPE_PRECISION (TREE_TYPE (*nit));
  unsigned original_precision = precision;
  tree type, var_before;
  gimple_stmt_iterator gsi;
  gphi_iterator psi;
  gcond *stmt;
  edge exit = single_dom_exit (loop);
  gimple_seq stmts;
  machine_mode mode;
  bool unsigned_p = false;

  for (psi = gsi_start_phis (loop->header);
       !gsi_end_p (psi); gsi_next (&psi))
    {
      gphi *phi = psi.phi ();
      tree res = PHI_RESULT (phi);
      bool uns;

      type = TREE_TYPE (res);
      if (virtual_operand_p (res)
	  || (!INTEGRAL_TYPE_P (type)
	      && !POINTER_TYPE_P (type))
	  || TYPE_PRECISION (type) < precision)
	continue;

      uns = POINTER_TYPE_P (type) | TYPE_UNSIGNED (type);

      if (TYPE_PRECISION (type) > precision)
	unsigned_p = uns;
      else
	unsigned_p |= uns;

      precision = TYPE_PRECISION (type);
    }

  mode = smallest_mode_for_size (precision, MODE_INT);
  precision = GET_MODE_PRECISION (mode);
  type = build_nonstandard_integer_type (precision, unsigned_p);
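
  /* Illustration (not in the original source): if *NIT has 32-bit
     unsigned int type but some header PHI uses a 64-bit IV, PRECISION
     was bumped to 64 in the loop above, MODE is the 64-bit integer
     mode, and the conversion below widens *NIT to the new 64-bit TYPE
     on the preheader edge.  */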

  if (original_precision != precision)
    {
      *nit = fold_convert (type, *nit);
      *nit = force_gimple_operand (*nit, &stmts, true, NULL_TREE);
      if (stmts)
	gsi_insert_seq_on_edge_immediate (loop_preheader_edge (loop), stmts);
    }

  if (bump_in_latch)
    gsi = gsi_last_bb (loop->latch);
  else
    gsi = gsi_last_nondebug_bb (loop->header);
  create_iv (build_int_cst_type (type, 0), build_int_cst (type, 1), NULL_TREE,
	     loop, &gsi, bump_in_latch, &var_before, NULL);

  rewrite_all_phi_nodes_with_iv (loop, var_before);

  stmt = as_a <gcond *> (last_stmt (exit->src));
  /* Make the loop exit if the control condition is not satisfied.  */
  if (exit->flags & EDGE_TRUE_VALUE)
    {
      edge te, fe;

      extract_true_false_edges_from_block (exit->src, &te, &fe);
      te->flags = EDGE_FALSE_VALUE;
      fe->flags = EDGE_TRUE_VALUE;
    }
  gimple_cond_set_code (stmt, LT_EXPR);
  gimple_cond_set_lhs (stmt, var_before);
  gimple_cond_set_rhs (stmt, *nit);
  update_stmt (stmt);

  return var_before;
}
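
/* Illustrative example (a sketch added here, not part of the original
   file): for a loop counting by 4,

     i_1 = PHI <0(preheader), i_2(latch)>
     ...
     i_2 = i_1 + 4;
     if (i_2 != 400) goto header; else goto exit;

   calling canonicalize_loop_ivs with *NIT == 100 creates the canonical
   counter

     ivtmp_3 = PHI <0(preheader), ivtmp_4(latch)>

   rewrites the exit test as "if (ivtmp < 100)" (flipping the true/false
   edges if needed so that the false edge leaves the loop), and
   rewrite_all_phi_nodes_with_iv replaces the old PHI by the equivalent
   computation

     i_1 = (int) ivtmp_3 * 4;

   The exact SSA names and the ivtmp version compared in the exit test
   depend on BUMP_IN_LATCH.  */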