xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/cfgloopmanip.c (revision cef8759bd76c1b621f8eab8faa6f208faabc2e15)
1 /* Loop manipulation code for GNU compiler.
2    Copyright (C) 2002-2017 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "cfghooks.h"
28 #include "cfganal.h"
29 #include "cfgloop.h"
30 #include "gimple-iterator.h"
31 #include "gimplify-me.h"
32 #include "tree-ssa-loop-manip.h"
33 #include "dumpfile.h"
34 
35 static void copy_loops_to (struct loop **, int,
36 			   struct loop *);
37 static void loop_redirect_edge (edge, basic_block);
38 static void remove_bbs (basic_block *, int);
39 static bool rpe_enum_p (const_basic_block, const void *);
40 static int find_path (edge, basic_block **);
41 static void fix_loop_placements (struct loop *, bool *);
42 static bool fix_bb_placement (basic_block);
43 static void fix_bb_placements (basic_block, bool *, bitmap);
44 
45 /* Checks whether basic block BB is dominated by DATA.  */
46 static bool
47 rpe_enum_p (const_basic_block bb, const void *data)
48 {
49   return dominated_by_p (CDI_DOMINATORS, bb, (const_basic_block) data);
50 }
51 
52 /* Remove basic blocks BBS.  NBBS is the number of the basic blocks.  */
53 
54 static void
55 remove_bbs (basic_block *bbs, int nbbs)
56 {
57   int i;
58 
59   for (i = 0; i < nbbs; i++)
60     delete_basic_block (bbs[i]);
61 }
62 
63 /* Find the path -- i.e. the basic blocks dominated by edge E -- and put them
64    into array BBS, which will be allocated large enough to contain them.
65    E->dest must have exactly one predecessor for this to work (this is
66    easy to achieve, but we do not enforce it here because we do not want
67    this function to alter anything).  The number of basic blocks in the
68    path is returned.  */
69 static int
70 find_path (edge e, basic_block **bbs)
71 {
72   gcc_assert (EDGE_COUNT (e->dest->preds) <= 1);
73 
74   /* Find bbs in the path.  */
75   *bbs = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
76   return dfs_enumerate_from (e->dest, 0, rpe_enum_p, *bbs,
77 			     n_basic_blocks_for_fn (cfun), e->dest);
78 }
79 
80 /* Fix placement of basic block BB inside loop hierarchy --
81    Let L be the loop to which BB belongs.  Then every successor of BB must either
82      1) belong to some superloop of loop L, or
83      2) be the header of a loop K such that K->outer is a superloop of L.
84    Returns true if we had to move BB into another loop to enforce this condition,
85    false if the placement of BB was already correct (provided that placements
86    of its successors are correct).  */
87 static bool
88 fix_bb_placement (basic_block bb)
89 {
90   edge e;
91   edge_iterator ei;
92   struct loop *loop = current_loops->tree_root, *act;
93 
94   FOR_EACH_EDGE (e, ei, bb->succs)
95     {
96       if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
97 	continue;
98 
99       act = e->dest->loop_father;
100       if (act->header == e->dest)
101 	act = loop_outer (act);
102 
103       if (flow_loop_nested_p (loop, act))
104 	loop = act;
105     }
106 
107   if (loop == bb->loop_father)
108     return false;
109 
110   remove_bb_from_loops (bb);
111   add_bb_to_loop (bb, loop);
112 
113   return true;
114 }
115 
116 /* Fix placement of LOOP inside loop tree, i.e. find the innermost superloop
117    of LOOP to which at least one exit edge of LOOP leads, and set it
118    as the immediate superloop of LOOP.  Return true if the immediate superloop
119    of LOOP changed.
120 
121    IRRED_INVALIDATED is set to true if a change in the loop structures might
122    invalidate the information about irreducible regions.  */
123 
124 static bool
125 fix_loop_placement (struct loop *loop, bool *irred_invalidated)
126 {
127   unsigned i;
128   edge e;
129   vec<edge> exits = get_loop_exit_edges (loop);
130   struct loop *father = current_loops->tree_root, *act;
131   bool ret = false;
132 
133   FOR_EACH_VEC_ELT (exits, i, e)
134     {
135       act = find_common_loop (loop, e->dest->loop_father);
136       if (flow_loop_nested_p (father, act))
137 	father = act;
138     }
139 
140   if (father != loop_outer (loop))
141     {
142       for (act = loop_outer (loop); act != father; act = loop_outer (act))
143 	act->num_nodes -= loop->num_nodes;
144       flow_loop_tree_node_remove (loop);
145       flow_loop_tree_node_add (father, loop);
146 
147       /* The exit edges of LOOP no longer exit its original immediate
148 	 superloops; remove them from the appropriate exit lists.  */
149       FOR_EACH_VEC_ELT (exits, i, e)
150 	{
151 	  /* We may need to recompute irreducible loops.  */
152 	  if (e->flags & EDGE_IRREDUCIBLE_LOOP)
153 	    *irred_invalidated = true;
154 	  rescan_loop_exit (e, false, false);
155 	}
156 
157       ret = true;
158     }
159 
160   exits.release ();
161   return ret;
162 }
163 
164 /* Fix placements of basic blocks inside the loop hierarchy, i.e.
165    enforce the condition stated in the description of fix_bb_placement.  We
166    start from basic block FROM that had some of its successors removed, so that
167    its placement may no longer be correct, and iteratively fix the placement of
168    its predecessors, which may change if the placement of FROM changed.  Also fix
169    the placement of subloops of FROM->loop_father, which might also be altered due
170    to this change; the condition for them is similar, except that instead of
171    successors we consider edges coming out of the loops.
172 
173    If the changes may invalidate the information about irreducible regions,
174    IRRED_INVALIDATED is set to true.
175 
176    If LOOP_CLOSED_SSA_INVALIDATED is non-NULL then all basic blocks with
177    changed loop_father are collected there.  */
178 
179 static void
180 fix_bb_placements (basic_block from,
181 		   bool *irred_invalidated,
182 		   bitmap loop_closed_ssa_invalidated)
183 {
184   basic_block *queue, *qtop, *qbeg, *qend;
185   struct loop *base_loop, *target_loop;
186   edge e;
187 
188   /* We pass through blocks back-reachable from FROM, testing whether some
189      of their successors moved to an outer loop.  It may be necessary to
190      iterate several times, but the process is finite, as we stop unless we move
191      the basic block up the loop structure.  The whole story is a bit
192      more complicated due to the presence of subloops, which are moved using
193      fix_loop_placement.  */
194 
195   base_loop = from->loop_father;
196   /* If we are already in the outermost loop, the basic blocks cannot be moved
197      outside of it.  If FROM is the header of the base loop, it cannot be moved
198      outside of it, either.  In both cases, we can end now.  */
199   if (base_loop == current_loops->tree_root
200       || from == base_loop->header)
201     return;
202 
203   auto_sbitmap in_queue (last_basic_block_for_fn (cfun));
204   bitmap_clear (in_queue);
205   bitmap_set_bit (in_queue, from->index);
206   /* Prevent us from going out of the base_loop.  */
207   bitmap_set_bit (in_queue, base_loop->header->index);
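  /* The queue below is used as a circular buffer: QBEG and QEND wrap around
     at QTOP.  A capacity of base_loop->num_nodes + 1 slots is sufficient,
     because the IN_QUEUE bitmap ensures that each block of BASE_LOOP is
     queued at most once at any given time, and the header itself is never
     enqueued.  */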
208 
209   queue = XNEWVEC (basic_block, base_loop->num_nodes + 1);
210   qtop = queue + base_loop->num_nodes + 1;
211   qbeg = queue;
212   qend = queue + 1;
213   *qbeg = from;
214 
215   while (qbeg != qend)
216     {
217       edge_iterator ei;
218       from = *qbeg;
219       qbeg++;
220       if (qbeg == qtop)
221 	qbeg = queue;
222       bitmap_clear_bit (in_queue, from->index);
223 
224       if (from->loop_father->header == from)
225 	{
226 	  /* Subloop header, maybe move the loop upward.  */
227 	  if (!fix_loop_placement (from->loop_father, irred_invalidated))
228 	    continue;
229 	  target_loop = loop_outer (from->loop_father);
230 	  if (loop_closed_ssa_invalidated)
231 	    {
232 	      basic_block *bbs = get_loop_body (from->loop_father);
233 	      for (unsigned i = 0; i < from->loop_father->num_nodes; ++i)
234 		bitmap_set_bit (loop_closed_ssa_invalidated, bbs[i]->index);
235 	      free (bbs);
236 	    }
237 	}
238       else
239 	{
240 	  /* Ordinary basic block.  */
241 	  if (!fix_bb_placement (from))
242 	    continue;
243 	  target_loop = from->loop_father;
244 	  if (loop_closed_ssa_invalidated)
245 	    bitmap_set_bit (loop_closed_ssa_invalidated, from->index);
246 	}
247 
248       FOR_EACH_EDGE (e, ei, from->succs)
249 	{
250 	  if (e->flags & EDGE_IRREDUCIBLE_LOOP)
251 	    *irred_invalidated = true;
252 	}
253 
254       /* Something has changed, insert predecessors into queue.  */
255       FOR_EACH_EDGE (e, ei, from->preds)
256 	{
257 	  basic_block pred = e->src;
258 	  struct loop *nca;
259 
260 	  if (e->flags & EDGE_IRREDUCIBLE_LOOP)
261 	    *irred_invalidated = true;
262 
263 	  if (bitmap_bit_p (in_queue, pred->index))
264 	    continue;
265 
266 	  /* If it is a subloop, then either it was not moved, or
267 	     the path up the loop tree from base_loop does not contain
268 	     it.  */
269 	  nca = find_common_loop (pred->loop_father, base_loop);
270 	  if (pred->loop_father != base_loop
271 	      && (nca == base_loop
272 		  || nca != pred->loop_father))
273 	    pred = pred->loop_father->header;
274 	  else if (!flow_loop_nested_p (target_loop, pred->loop_father))
275 	    {
276 	      /* If PRED is already higher in the loop hierarchy than the
277 		 TARGET_LOOP to which we moved FROM, the change of the position
278 		 of FROM does not affect the position of PRED, so there is no
279 		 point in processing it.  */
280 	      continue;
281 	    }
282 
283 	  if (bitmap_bit_p (in_queue, pred->index))
284 	    continue;
285 
286 	  /* Schedule the basic block.  */
287 	  *qend = pred;
288 	  qend++;
289 	  if (qend == qtop)
290 	    qend = queue;
291 	  bitmap_set_bit (in_queue, pred->index);
292 	}
293     }
294   free (queue);
295 }
296 
297 /* Removes the path beginning at edge E, i.e. removes the basic blocks dominated
298    by E and updates loop structures and dominators.  Returns true if we were able
299    to remove the path, false otherwise (and nothing is affected then).  */
300 bool
301 remove_path (edge e, bool *irred_invalidated,
302 	     bitmap loop_closed_ssa_invalidated)
303 {
304   edge ae;
305   basic_block *rem_bbs, *bord_bbs, from, bb;
306   vec<basic_block> dom_bbs;
307   int i, nrem, n_bord_bbs;
308   bool local_irred_invalidated = false;
309   edge_iterator ei;
310   struct loop *l, *f;
311 
312   if (! irred_invalidated)
313     irred_invalidated = &local_irred_invalidated;
314 
315   if (!can_remove_branch_p (e))
316     return false;
317 
318   /* Keep track of whether we need to update information about irreducible
319      regions.  This is the case if the removed area is part of an
320      irreducible region, or if the set of basic blocks that belong to a loop
321      that is inside an irreducible region is changed, or if such a loop is
322      removed.  */
323   if (e->flags & EDGE_IRREDUCIBLE_LOOP)
324     *irred_invalidated = true;
325 
326   /* We need to check whether basic blocks are dominated by the edge
327      e, but we only have basic block dominators.  This is easy to
328      fix -- when e->dest has exactly one predecessor, this corresponds
329      to blocks dominated by e->dest; if not, we split the edge.  */
330   if (!single_pred_p (e->dest))
331     e = single_pred_edge (split_edge (e));
332 
333   /* It may happen that by removing the path we remove one or more of the loops
334      it belongs to.  In this case, first unloop those loops, then proceed
335      normally.  We may assume that e->dest is not a header of any loop,
336      as it now has exactly one predecessor.  */
337   for (l = e->src->loop_father; loop_outer (l); l = f)
338     {
339       f = loop_outer (l);
340       if (dominated_by_p (CDI_DOMINATORS, l->latch, e->dest))
341         unloop (l, irred_invalidated, loop_closed_ssa_invalidated);
342     }
343 
344   /* Identify the path.  */
345   nrem = find_path (e, &rem_bbs);
346 
347   n_bord_bbs = 0;
348   bord_bbs = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
349   auto_sbitmap seen (last_basic_block_for_fn (cfun));
350   bitmap_clear (seen);
351 
352   /* Find "border" blocks -- i.e. those with a predecessor in the removed path.  */
353   for (i = 0; i < nrem; i++)
354     bitmap_set_bit (seen, rem_bbs[i]->index);
355   if (!*irred_invalidated)
356     FOR_EACH_EDGE (ae, ei, e->src->succs)
357       if (ae != e && ae->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
358 	  && !bitmap_bit_p (seen, ae->dest->index)
359 	  && ae->flags & EDGE_IRREDUCIBLE_LOOP)
360 	{
361 	  *irred_invalidated = true;
362 	  break;
363 	}
364 
365   for (i = 0; i < nrem; i++)
366     {
367       bb = rem_bbs[i];
368       FOR_EACH_EDGE (ae, ei, rem_bbs[i]->succs)
369 	if (ae->dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
370 	    && !bitmap_bit_p (seen, ae->dest->index))
371 	  {
372 	    bitmap_set_bit (seen, ae->dest->index);
373 	    bord_bbs[n_bord_bbs++] = ae->dest;
374 
375 	    if (ae->flags & EDGE_IRREDUCIBLE_LOOP)
376 	      *irred_invalidated = true;
377 	  }
378     }
379 
380   /* Remove the path.  */
381   from = e->src;
382   remove_branch (e);
383   dom_bbs.create (0);
384 
385   /* Cancel loops contained in the path.  */
386   for (i = 0; i < nrem; i++)
387     if (rem_bbs[i]->loop_father->header == rem_bbs[i])
388       cancel_loop_tree (rem_bbs[i]->loop_father);
389 
390   remove_bbs (rem_bbs, nrem);
391   free (rem_bbs);
392 
393   /* Find blocks whose dominators may be affected.  */
394   bitmap_clear (seen);
395   for (i = 0; i < n_bord_bbs; i++)
396     {
397       basic_block ldom;
398 
399       bb = get_immediate_dominator (CDI_DOMINATORS, bord_bbs[i]);
400       if (bitmap_bit_p (seen, bb->index))
401 	continue;
402       bitmap_set_bit (seen, bb->index);
403 
404       for (ldom = first_dom_son (CDI_DOMINATORS, bb);
405 	   ldom;
406 	   ldom = next_dom_son (CDI_DOMINATORS, ldom))
407 	if (!dominated_by_p (CDI_DOMINATORS, from, ldom))
408 	  dom_bbs.safe_push (ldom);
409     }
410 
411   /* Recount dominators.  */
412   iterate_fix_dominators (CDI_DOMINATORS, dom_bbs, true);
413   dom_bbs.release ();
414   free (bord_bbs);
415 
416   /* Fix placements of basic blocks inside loops and the placement of
417      loops in the loop tree.  */
418   fix_bb_placements (from, irred_invalidated, loop_closed_ssa_invalidated);
419   fix_loop_placements (from->loop_father, irred_invalidated);
420 
421   if (local_irred_invalidated
422       && loops_state_satisfies_p (LOOPS_HAVE_MARKED_IRREDUCIBLE_REGIONS))
423     mark_irreducible_loops ();
424 
425   return true;
426 }
427 
428 /* Creates a place for the new LOOP in the loop structure of FN.  */
429 
430 void
431 place_new_loop (struct function *fn, struct loop *loop)
432 {
433   loop->num = number_of_loops (fn);
434   vec_safe_push (loops_for_fn (fn)->larray, loop);
435 }
436 
437 /* Given a LOOP structure with the header and latch filled in, find the body
438    of the corresponding loop and add it to the loop tree.  Insert LOOP as a
439    son of OUTER.  */
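/* For a typical calling sequence, see create_empty_loop_on_edge below: the
   caller allocates the loop with alloc_loop, fills in loop->header and
   loop->latch, and only then calls add_loop.  */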
440 
441 void
442 add_loop (struct loop *loop, struct loop *outer)
443 {
444   basic_block *bbs;
445   int i, n;
446   struct loop *subloop;
447   edge e;
448   edge_iterator ei;
449 
450   /* Add it to loop structure.  */
451   place_new_loop (cfun, loop);
452   flow_loop_tree_node_add (outer, loop);
453 
454   /* Find its nodes.  */
455   bbs = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
456   n = get_loop_body_with_size (loop, bbs, n_basic_blocks_for_fn (cfun));
457 
458   for (i = 0; i < n; i++)
459     {
460       if (bbs[i]->loop_father == outer)
461 	{
462 	  remove_bb_from_loops (bbs[i]);
463 	  add_bb_to_loop (bbs[i], loop);
464 	  continue;
465 	}
466 
467       loop->num_nodes++;
468 
469       /* If we find a direct subloop of OUTER, move it to LOOP.  */
470       subloop = bbs[i]->loop_father;
471       if (loop_outer (subloop) == outer
472 	  && subloop->header == bbs[i])
473 	{
474 	  flow_loop_tree_node_remove (subloop);
475 	  flow_loop_tree_node_add (loop, subloop);
476 	}
477     }
478 
479   /* Update the information about loop exit edges.  */
480   for (i = 0; i < n; i++)
481     {
482       FOR_EACH_EDGE (e, ei, bbs[i]->succs)
483 	{
484 	  rescan_loop_exit (e, false, false);
485 	}
486     }
487 
488   free (bbs);
489 }
490 
491 /* Multiply all frequencies in LOOP by NUM/DEN.  */
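/* For instance, scale_loop_frequencies (loop, 1, 2) halves the frequency of
   every basic block in LOOP.  */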
492 
493 void
494 scale_loop_frequencies (struct loop *loop, int num, int den)
495 {
496   basic_block *bbs;
497 
498   bbs = get_loop_body (loop);
499   scale_bbs_frequencies_int (bbs, loop->num_nodes, num, den);
500   free (bbs);
501 }
502 
503 /* Multiply all frequencies in LOOP by SCALE/REG_BR_PROB_BASE.
504    If ITERATION_BOUND is non-zero, scale even further if loop is predicted
505    to iterate too many times.  */
506 
507 void
508 scale_loop_profile (struct loop *loop, int scale, gcov_type iteration_bound)
509 {
510   gcov_type iterations = expected_loop_iterations_unbounded (loop);
511   edge e;
512   edge_iterator ei;
513 
514   if (dump_file && (dump_flags & TDF_DETAILS))
515     fprintf (dump_file, ";; Scaling loop %i with scale %f, "
516 	     "bounding iterations to %i from guessed %i\n",
517 	     loop->num, (double)scale / REG_BR_PROB_BASE,
518 	     (int)iteration_bound, (int)iterations);
519 
520   /* See if loop is predicted to iterate too many times.  */
521   if (iteration_bound && iterations > 0
522       && apply_probability (iterations, scale) > iteration_bound)
523     {
524       /* Fixing the loop profile for a different trip count is not trivial; the
525 	 exit probabilities have to be updated to match and the frequencies
526 	 propagated down to the loop body.
527 
528 	 We fully update only the simple case of a loop with a single exit taken
529 	 either from the latch or from the BB just before the latch, from a BB
530 	 with a simple conditional jump.  This is OK for use in the vectorizer.  */
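      /* Worked example: with ITERATION_BOUND == 4, the exit probability set
	 below becomes REG_BR_PROB_BASE / 4, i.e. 25%, and the other successor
	 of the exit block receives the remaining 75%.  */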
531       e = single_exit (loop);
532       if (e)
533 	{
534 	  edge other_e;
535 	  int freq_delta;
536 	  gcov_type count_delta;
537 
538           FOR_EACH_EDGE (other_e, ei, e->src->succs)
539 	    if (!(other_e->flags & (EDGE_ABNORMAL | EDGE_FAKE))
540 		&& e != other_e)
541 	      break;
542 
543 	  /* The probability of exit must be 1/iteration_bound.  */
544 	  freq_delta = EDGE_FREQUENCY (e);
545 	  e->probability = REG_BR_PROB_BASE / iteration_bound;
546 	  other_e->probability = inverse_probability (e->probability);
547 	  freq_delta -= EDGE_FREQUENCY (e);
548 
549 	  /* Adjust counts accordingly.  */
550 	  count_delta = e->count;
551 	  e->count = apply_probability (e->src->count, e->probability);
552 	  other_e->count = apply_probability (e->src->count, other_e->probability);
553 	  count_delta -= e->count;
554 
555 	  /* If latch exists, change its frequency and count, since we changed
556 	     probability of exit.  Theoretically we should update everything from
557 	     source of exit edge to latch, but for vectorizer this is enough.  */
558 	  if (loop->latch
559 	      && loop->latch != e->src)
560 	    {
561 	      loop->latch->frequency += freq_delta;
562 	      if (loop->latch->frequency < 0)
563 		loop->latch->frequency = 0;
564 	      loop->latch->count += count_delta;
565 	      if (loop->latch->count < 0)
566 		loop->latch->count = 0;
567 	    }
568 	}
569 
570       /* Roughly speaking we want to reduce the loop body profile by the
571 	 ratio of the bounded to the predicted number of iterations.  We can
572 	 however do better if we look at the actual profile, if it is available.  */
573       scale = RDIV (iteration_bound * scale, iterations);
574       if (loop->header->count)
575 	{
576 	  gcov_type count_in = 0;
577 
578 	  FOR_EACH_EDGE (e, ei, loop->header->preds)
579 	    if (e->src != loop->latch)
580 	      count_in += e->count;
581 
582 	  if (count_in != 0)
583 	    scale = GCOV_COMPUTE_SCALE (count_in * iteration_bound,
584                                         loop->header->count);
585 	}
586       else if (loop->header->frequency)
587 	{
588 	  int freq_in = 0;
589 
590 	  FOR_EACH_EDGE (e, ei, loop->header->preds)
591 	    if (e->src != loop->latch)
592 	      freq_in += EDGE_FREQUENCY (e);
593 
594 	  if (freq_in != 0)
595 	    scale = GCOV_COMPUTE_SCALE (freq_in * iteration_bound,
596                                         loop->header->frequency);
597 	}
598       if (!scale)
599 	scale = 1;
600     }
601 
602   if (scale == REG_BR_PROB_BASE)
603     return;
604 
605   /* Scale the actual probabilities.  */
606   scale_loop_frequencies (loop, scale, REG_BR_PROB_BASE);
607   if (dump_file && (dump_flags & TDF_DETAILS))
608     fprintf (dump_file, ";; guessed iterations are now %i\n",
609 	     (int)expected_loop_iterations_unbounded (loop));
610 }
611 
612 /* Recompute dominance information for basic blocks outside LOOP.  */
613 
614 static void
615 update_dominators_in_loop (struct loop *loop)
616 {
617   vec<basic_block> dom_bbs = vNULL;
618   basic_block *body;
619   unsigned i;
620 
621   auto_sbitmap seen (last_basic_block_for_fn (cfun));
622   bitmap_clear (seen);
623   body = get_loop_body (loop);
624 
625   for (i = 0; i < loop->num_nodes; i++)
626     bitmap_set_bit (seen, body[i]->index);
627 
628   for (i = 0; i < loop->num_nodes; i++)
629     {
630       basic_block ldom;
631 
632       for (ldom = first_dom_son (CDI_DOMINATORS, body[i]);
633 	   ldom;
634 	   ldom = next_dom_son (CDI_DOMINATORS, ldom))
635 	if (!bitmap_bit_p (seen, ldom->index))
636 	  {
637 	    bitmap_set_bit (seen, ldom->index);
638 	    dom_bbs.safe_push (ldom);
639 	  }
640     }
641 
642   iterate_fix_dominators (CDI_DOMINATORS, dom_bbs, false);
643   free (body);
644   dom_bbs.release ();
645 }
646 
647 /* Creates an if region as shown below.  CONDITION is used to create
648    the test for the if.
649 
650    |
651    |     -------------                 -------------
652    |     |  pred_bb  |                 |  pred_bb  |
653    |     -------------                 -------------
654    |           |                             |
655    |           |                             | ENTRY_EDGE
656    |           | ENTRY_EDGE                  V
657    |           |             ====>     -------------
658    |           |                       |  cond_bb  |
659    |           |                       | CONDITION |
660    |           |                       -------------
661    |           V                        /         \
662    |     -------------         e_false /           \ e_true
663    |     |  succ_bb  |                V             V
664    |     -------------         -----------       -----------
665    |                           | false_bb |      | true_bb |
666    |                           -----------       -----------
667    |                                   \           /
668    |                                    \         /
669    |                                     V       V
670    |                                   -------------
671    |                                   |  join_bb  |
672    |                                   -------------
673    |                                         | exit_edge (result)
674    |                                         V
675    |                                    -----------
676    |                                    | succ_bb |
677    |                                    -----------
678    |
679  */
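/* A minimal usage sketch (variable names are illustrative only):

     edge exit = create_empty_if_region_on_edge (e, cond);

   The source of the returned edge is JOIN_BB; its two predecessors are the
   still-empty TRUE_BB and FALSE_BB, which the caller is expected to fill
   with the conditional code.  */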
680 
681 edge
682 create_empty_if_region_on_edge (edge entry_edge, tree condition)
683 {
684 
685   basic_block cond_bb, true_bb, false_bb, join_bb;
686   edge e_true, e_false, exit_edge;
687   gcond *cond_stmt;
688   tree simple_cond;
689   gimple_stmt_iterator gsi;
690 
691   cond_bb = split_edge (entry_edge);
692 
693   /* Insert condition in cond_bb.  */
694   gsi = gsi_last_bb (cond_bb);
695   simple_cond =
696     force_gimple_operand_gsi (&gsi, condition, true, NULL,
697 			      false, GSI_NEW_STMT);
698   cond_stmt = gimple_build_cond_from_tree (simple_cond, NULL_TREE, NULL_TREE);
699   gsi = gsi_last_bb (cond_bb);
700   gsi_insert_after (&gsi, cond_stmt, GSI_NEW_STMT);
701 
702   join_bb = split_edge (single_succ_edge (cond_bb));
703 
704   e_true = single_succ_edge (cond_bb);
705   true_bb = split_edge (e_true);
706 
707   e_false = make_edge (cond_bb, join_bb, 0);
708   false_bb = split_edge (e_false);
709 
710   e_true->flags &= ~EDGE_FALLTHRU;
711   e_true->flags |= EDGE_TRUE_VALUE;
712   e_false->flags &= ~EDGE_FALLTHRU;
713   e_false->flags |= EDGE_FALSE_VALUE;
714 
715   set_immediate_dominator (CDI_DOMINATORS, cond_bb, entry_edge->src);
716   set_immediate_dominator (CDI_DOMINATORS, true_bb, cond_bb);
717   set_immediate_dominator (CDI_DOMINATORS, false_bb, cond_bb);
718   set_immediate_dominator (CDI_DOMINATORS, join_bb, cond_bb);
719 
720   exit_edge = single_succ_edge (join_bb);
721 
722   if (single_pred_p (exit_edge->dest))
723     set_immediate_dominator (CDI_DOMINATORS, exit_edge->dest, join_bb);
724 
725   return exit_edge;
726 }
727 
728 /* create_empty_loop_on_edge
729    |
730    |    - pred_bb -                   ------ pred_bb ------
731    |   |           |                 | iv0 = initial_value |
732    |    -----|-----                   ---------|-----------
733    |         |                       ______    | entry_edge
734    |         | entry_edge           /      |   |
735    |         |             ====>   |      -V---V- loop_header -------------
736    |         V                     |     | iv_before = phi (iv0, iv_after) |
737    |    - succ_bb -                |      ---|-----------------------------
738    |   |           |               |         |
739    |    -----------                |      ---V--- loop_body ---------------
740    |                               |     | iv_after = iv_before + stride   |
741    |                               |     | if (iv_before < upper_bound)    |
742    |                               |      ---|--------------\--------------
743    |                               |         |               \ exit_e
744    |                               |         V                \
745    |                               |       - loop_latch -      V- succ_bb -
746    |                               |      |              |     |           |
747    |                               |       /-------------       -----------
748    |                                \ ___ /
749 
750    Creates an empty loop as shown above; IV_BEFORE is the SSA_NAME
751    that is used before the increment of IV.  IV_BEFORE should be used for
752    adding code to the body that uses the IV.  OUTER is the outer loop in
753    which the new loop should be inserted.
754 
755    Both INITIAL_VALUE and UPPER_BOUND expressions are gimplified and
756    inserted on the loop entry edge.  This implies that this function
757    should be used only when the UPPER_BOUND expression is a loop
758    invariant.  */
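/* A hypothetical call creating a loop on edge E inside loop OUTER, with the
   induction variable starting at 0, incremented by 1, and controlled by the
   exit test IV_BEFORE < N (the variable names are illustrative only):

     tree iv_before, iv_after;
     struct loop *l
       = create_empty_loop_on_edge (e, build_int_cst (sizetype, 0),
				    build_int_cst (sizetype, 1), n,
				    create_tmp_var (sizetype, "iv"),
				    &iv_before, &iv_after, outer);

   Code that uses the induction variable should then be added to the loop
   body and refer to IV_BEFORE.  */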
759 
760 struct loop *
761 create_empty_loop_on_edge (edge entry_edge,
762 			   tree initial_value,
763 			   tree stride, tree upper_bound,
764 			   tree iv,
765 			   tree *iv_before,
766 			   tree *iv_after,
767 			   struct loop *outer)
768 {
769   basic_block loop_header, loop_latch, succ_bb, pred_bb;
770   struct loop *loop;
771   gimple_stmt_iterator gsi;
772   gimple_seq stmts;
773   gcond *cond_expr;
774   tree exit_test;
775   edge exit_e;
776   int prob;
777 
778   gcc_assert (entry_edge && initial_value && stride && upper_bound && iv);
779 
780   /* Create header, latch and wire up the loop.  */
781   pred_bb = entry_edge->src;
782   loop_header = split_edge (entry_edge);
783   loop_latch = split_edge (single_succ_edge (loop_header));
784   succ_bb = single_succ (loop_latch);
785   make_edge (loop_header, succ_bb, 0);
786   redirect_edge_succ_nodup (single_succ_edge (loop_latch), loop_header);
787 
788   /* Set immediate dominator information.  */
789   set_immediate_dominator (CDI_DOMINATORS, loop_header, pred_bb);
790   set_immediate_dominator (CDI_DOMINATORS, loop_latch, loop_header);
791   set_immediate_dominator (CDI_DOMINATORS, succ_bb, loop_header);
792 
793   /* Initialize a loop structure and put it in a loop hierarchy.  */
794   loop = alloc_loop ();
795   loop->header = loop_header;
796   loop->latch = loop_latch;
797   add_loop (loop, outer);
798 
799   /* TODO: Fix frequencies and counts.  */
800   prob = REG_BR_PROB_BASE / 2;
801 
802   scale_loop_frequencies (loop, REG_BR_PROB_BASE - prob, REG_BR_PROB_BASE);
803 
804   /* Update dominators.  */
805   update_dominators_in_loop (loop);
806 
807   /* Modify edge flags.  */
808   exit_e = single_exit (loop);
809   exit_e->flags = EDGE_LOOP_EXIT | EDGE_FALSE_VALUE;
810   single_pred_edge (loop_latch)->flags = EDGE_TRUE_VALUE;
811 
812   /* Construct IV code in loop.  */
813   initial_value = force_gimple_operand (initial_value, &stmts, true, iv);
814   if (stmts)
815     {
816       gsi_insert_seq_on_edge (loop_preheader_edge (loop), stmts);
817       gsi_commit_edge_inserts ();
818     }
819 
820   upper_bound = force_gimple_operand (upper_bound, &stmts, true, NULL);
821   if (stmts)
822     {
823       gsi_insert_seq_on_edge (loop_preheader_edge (loop), stmts);
824       gsi_commit_edge_inserts ();
825     }
826 
827   gsi = gsi_last_bb (loop_header);
828   create_iv (initial_value, stride, iv, loop, &gsi, false,
829 	     iv_before, iv_after);
830 
831   /* Insert loop exit condition.  */
832   cond_expr = gimple_build_cond
833     (LT_EXPR, *iv_before, upper_bound, NULL_TREE, NULL_TREE);
834 
835   exit_test = gimple_cond_lhs (cond_expr);
836   exit_test = force_gimple_operand_gsi (&gsi, exit_test, true, NULL,
837 					false, GSI_NEW_STMT);
838   gimple_cond_set_lhs (cond_expr, exit_test);
839   gsi = gsi_last_bb (exit_e->src);
840   gsi_insert_after (&gsi, cond_expr, GSI_NEW_STMT);
841 
842   split_block_after_labels (loop_header);
843 
844   return loop;
845 }
846 
847 /* Make the area between HEADER_EDGE and LATCH_EDGE a loop by connecting
848    the latch to the header, and update the loop tree and dominators
849    accordingly.  Everything between them plus the LATCH_EDGE destination must
850    be dominated by HEADER_EDGE destination, and back-reachable from
851    LATCH_EDGE source.  HEADER_EDGE is redirected to basic block SWITCH_BB,
852    FALSE_EDGE of SWITCH_BB to original destination of HEADER_EDGE and
853    TRUE_EDGE of SWITCH_BB to original destination of LATCH_EDGE.
854    Returns the newly created loop.  Frequencies and counts in the new loop
855    are scaled by FALSE_SCALE and in the old one by TRUE_SCALE.  */
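/* (The loop versioning code later in this file calls loopify with
   REDIRECT_ALL_EDGES set to false, since by that point one of the outgoing
   edges of SWITCH_BB has already been set up; see the comment in the body
   below.)  */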
856 
857 struct loop *
858 loopify (edge latch_edge, edge header_edge,
859 	 basic_block switch_bb, edge true_edge, edge false_edge,
860 	 bool redirect_all_edges, unsigned true_scale, unsigned false_scale)
861 {
862   basic_block succ_bb = latch_edge->dest;
863   basic_block pred_bb = header_edge->src;
864   struct loop *loop = alloc_loop ();
865   struct loop *outer = loop_outer (succ_bb->loop_father);
866   int freq;
867   gcov_type cnt;
868   edge e;
869   edge_iterator ei;
870 
871   loop->header = header_edge->dest;
872   loop->latch = latch_edge->src;
873 
874   freq = EDGE_FREQUENCY (header_edge);
875   cnt = header_edge->count;
876 
877   /* Redirect edges.  */
878   loop_redirect_edge (latch_edge, loop->header);
879   loop_redirect_edge (true_edge, succ_bb);
880 
881   /* During loop versioning, one of the switch_bb edges is already properly
882      set.  Do not redirect it again unless redirect_all_edges is true.  */
883   if (redirect_all_edges)
884     {
885       loop_redirect_edge (header_edge, switch_bb);
886       loop_redirect_edge (false_edge, loop->header);
887 
888       /* Update dominators.  */
889       set_immediate_dominator (CDI_DOMINATORS, switch_bb, pred_bb);
890       set_immediate_dominator (CDI_DOMINATORS, loop->header, switch_bb);
891     }
892 
893   set_immediate_dominator (CDI_DOMINATORS, succ_bb, switch_bb);
894 
895   /* Compute new loop.  */
896   add_loop (loop, outer);
897 
898   /* Add switch_bb to appropriate loop.  */
899   if (switch_bb->loop_father)
900     remove_bb_from_loops (switch_bb);
901   add_bb_to_loop (switch_bb, outer);
902 
903   /* Fix frequencies.  */
904   if (redirect_all_edges)
905     {
906       switch_bb->frequency = freq;
907       switch_bb->count = cnt;
908       FOR_EACH_EDGE (e, ei, switch_bb->succs)
909 	{
910 	  e->count = apply_probability (switch_bb->count, e->probability);
911 	}
912     }
913   scale_loop_frequencies (loop, false_scale, REG_BR_PROB_BASE);
914   scale_loop_frequencies (succ_bb->loop_father, true_scale, REG_BR_PROB_BASE);
915   update_dominators_in_loop (loop);
916 
917   return loop;
918 }
919 
920 /* Remove the latch edge of a LOOP and update loops to indicate that
921    the LOOP was removed.  After this function, the original loop latch will
922    have no successor, which the caller is expected to fix somehow.
923 
924    If this may cause the information about irreducible regions to become
925    invalid, IRRED_INVALIDATED is set to true.
926 
927    LOOP_CLOSED_SSA_INVALIDATED, if non-NULL, is a bitmap where we store
928    basic blocks that had a non-trivial update of their loop_father.  */
929 
930 void
931 unloop (struct loop *loop, bool *irred_invalidated,
932 	bitmap loop_closed_ssa_invalidated)
933 {
934   basic_block *body;
935   struct loop *ploop;
936   unsigned i, n;
937   basic_block latch = loop->latch;
938   bool dummy = false;
939 
940   if (loop_preheader_edge (loop)->flags & EDGE_IRREDUCIBLE_LOOP)
941     *irred_invalidated = true;
942 
943   /* This is relatively straightforward.  The dominators are unchanged, as
944      the loop header dominates the loop latch, so the only thing we have to
945      care about is the placement of loops and basic blocks inside the loop
946      tree.  We move them all to the outer loop, and then let fix_bb_placements
947      do its work.  */
948 
949   body = get_loop_body (loop);
950   n = loop->num_nodes;
951   for (i = 0; i < n; i++)
952     if (body[i]->loop_father == loop)
953       {
954 	remove_bb_from_loops (body[i]);
955 	add_bb_to_loop (body[i], loop_outer (loop));
956       }
957   free (body);
958 
959   while (loop->inner)
960     {
961       ploop = loop->inner;
962       flow_loop_tree_node_remove (ploop);
963       flow_loop_tree_node_add (loop_outer (loop), ploop);
964     }
965 
966   /* Remove the loop and free its data.  */
967   delete_loop (loop);
968 
969   remove_edge (single_succ_edge (latch));
970 
971   /* We do not pass IRRED_INVALIDATED to fix_bb_placements here, as even if
972      there is an irreducible region inside the cancelled loop, the flags will
973      still be correct.  */
974   fix_bb_placements (latch, &dummy, loop_closed_ssa_invalidated);
975 }
976 
977 /* Fix the placement of superloops of LOOP inside the loop tree, i.e. ensure
978    that the condition stated in the description of fix_loop_placement holds
979    for them.  It is used when we have removed some edges coming out of LOOP,
980    which may cause the right placement of LOOP inside the loop tree to change.
981 
982    IRRED_INVALIDATED is set to true if a change in the loop structures might
983    invalidate the information about irreducible regions.  */
984 
985 static void
986 fix_loop_placements (struct loop *loop, bool *irred_invalidated)
987 {
988   struct loop *outer;
989 
990   while (loop_outer (loop))
991     {
992       outer = loop_outer (loop);
993       if (!fix_loop_placement (loop, irred_invalidated))
994 	break;
995 
996       /* Changing the placement of a loop in the loop tree may alter the
997 	 validity of condition 2) of the description of fix_bb_placement
998 	 for its preheader, because the successor is the header and belongs
999 	 to the loop.  So call fix_bb_placements to fix up the placement
1000 	 of the preheader and (possibly) of its predecessors.  */
1001       fix_bb_placements (loop_preheader_edge (loop)->src,
1002 			 irred_invalidated, NULL);
1003       loop = outer;
1004     }
1005 }
1006 
1007 /* Duplicate loop bounds and other information we store about
1008    the loop into its duplicate.  */
1009 
1010 void
1011 copy_loop_info (struct loop *loop, struct loop *target)
1012 {
1013   gcc_checking_assert (!target->any_upper_bound && !target->any_estimate);
1014   target->any_upper_bound = loop->any_upper_bound;
1015   target->nb_iterations_upper_bound = loop->nb_iterations_upper_bound;
1016   target->any_likely_upper_bound = loop->any_likely_upper_bound;
1017   target->nb_iterations_likely_upper_bound
1018     = loop->nb_iterations_likely_upper_bound;
1019   target->any_estimate = loop->any_estimate;
1020   target->nb_iterations_estimate = loop->nb_iterations_estimate;
1021   target->estimate_state = loop->estimate_state;
1022   target->constraints = loop->constraints;
1023   target->warned_aggressive_loop_optimizations
1024     |= loop->warned_aggressive_loop_optimizations;
1025   target->in_oacc_kernels_region = loop->in_oacc_kernels_region;
1026   target->owned_clique = loop->owned_clique;
1027 }
1028 
1029 /* Creates a copy of LOOP as a subloop of the TARGET loop, placing the newly
1030    created loop into the loop structure.  */
1031 struct loop *
1032 duplicate_loop (struct loop *loop, struct loop *target)
1033 {
1034   struct loop *cloop;
1035   cloop = alloc_loop ();
1036   place_new_loop (cfun, cloop);
1037 
1038   copy_loop_info (loop, cloop);
1039 
1040   /* Mark the new loop as copy of LOOP.  */
1041   set_loop_copy (loop, cloop);
1042 
1043   /* Add it to target.  */
1044   flow_loop_tree_node_add (target, cloop);
1045 
1046   return cloop;
1047 }
1048 
1049 /* Copies structure of subloops of LOOP into TARGET loop, placing
1050    newly created loops into loop tree.  */
1051 void
1052 duplicate_subloops (struct loop *loop, struct loop *target)
1053 {
1054   struct loop *aloop, *cloop;
1055 
1056   for (aloop = loop->inner; aloop; aloop = aloop->next)
1057     {
1058       cloop = duplicate_loop (aloop, target);
1059       duplicate_subloops (aloop, cloop);
1060     }
1061 }
1062 
1063 /* Copies structure of subloops of N loops, stored in array COPIED_LOOPS,
1064    into TARGET loop, placing newly created loops into loop tree.  */
1065 static void
1066 copy_loops_to (struct loop **copied_loops, int n, struct loop *target)
1067 {
1068   struct loop *aloop;
1069   int i;
1070 
1071   for (i = 0; i < n; i++)
1072     {
1073       aloop = duplicate_loop (copied_loops[i], target);
1074       duplicate_subloops (copied_loops[i], aloop);
1075     }
1076 }
1077 
1078 /* Redirects edge E to basic block DEST.  */
1079 static void
1080 loop_redirect_edge (edge e, basic_block dest)
1081 {
1082   if (e->dest == dest)
1083     return;
1084 
1085   redirect_edge_and_branch_force (e, dest);
1086 }
1087 
1088 /* Check whether LOOP's body can be duplicated.  */
1089 bool
1090 can_duplicate_loop_p (const struct loop *loop)
1091 {
1092   int ret;
1093   basic_block *bbs = get_loop_body (loop);
1094 
1095   ret = can_copy_bbs_p (bbs, loop->num_nodes);
1096   free (bbs);
1097 
1098   return ret;
1099 }
1100 
1101 /* Sets probability and count of edge E to zero.  The probability and count
1102    are redistributed evenly to the remaining edges coming from E->src.  */
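/* For example, if E->src has three successors and E carries a probability of
   30% and a count of 90, each of the two remaining edges gains 15% and 45
   (any division remainder goes to the last of them), after which E's
   probability and count are zero.  */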
1103 
1104 static void
1105 set_zero_probability (edge e)
1106 {
1107   basic_block bb = e->src;
1108   edge_iterator ei;
1109   edge ae, last = NULL;
1110   unsigned n = EDGE_COUNT (bb->succs);
1111   gcov_type cnt = e->count, cnt1;
1112   unsigned prob = e->probability, prob1;
1113 
1114   gcc_assert (n > 1);
1115   cnt1 = cnt / (n - 1);
1116   prob1 = prob / (n - 1);
1117 
1118   FOR_EACH_EDGE (ae, ei, bb->succs)
1119     {
1120       if (ae == e)
1121 	continue;
1122 
1123       ae->probability += prob1;
1124       ae->count += cnt1;
1125       last = ae;
1126     }
1127 
1128   /* Move the rest to one of the edges.  */
1129   last->probability += prob % (n - 1);
1130   last->count += cnt % (n - 1);
1131 
1132   e->probability = 0;
1133   e->count = 0;
1134 }
1135 
1136 /* Duplicates the body of LOOP to the given edge E NDUPL times.  Takes care of
1137    updating the loop structure and dominators.  E's destination must be the LOOP
1138    header for this to work, i.e. it must be the entry or the latch edge of this
1139    loop; these are unique, as loops must have preheaders for this function to
1140    work correctly (in case E is the latch edge, the function unrolls the loop;
1141    if E is the entry edge, it peels the loop).  The edges created by copying the
1142    ORIG edge in the copies corresponding to set bits in the WONT_EXIT bitmap
1143    (bit 0 corresponds to the original LOOP body, the other copies are numbered
1144    in the order given by the control flow through them) are stored into the
1145    TO_REMOVE vector.  Returns false if duplication is impossible.  */
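/* For instance, unrolling LOOP with an unroll factor of three corresponds to
   E being the latch edge and NDUPL == 2 (the body then appears three times
   inside the loop), while peeling off one iteration corresponds to E being
   the entry edge and NDUPL == 1.  Bit 0 of WONT_EXIT refers to the original
   body and bits 1 .. NDUPL to the copies, in control-flow order.  */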
1146 
1147 bool
1148 duplicate_loop_to_header_edge (struct loop *loop, edge e,
1149 			       unsigned int ndupl, sbitmap wont_exit,
1150 			       edge orig, vec<edge> *to_remove,
1151 			       int flags)
1152 {
1153   struct loop *target, *aloop;
1154   struct loop **orig_loops;
1155   unsigned n_orig_loops;
1156   basic_block header = loop->header, latch = loop->latch;
1157   basic_block *new_bbs, *bbs, *first_active;
1158   basic_block new_bb, bb, first_active_latch = NULL;
1159   edge ae, latch_edge;
1160   edge spec_edges[2], new_spec_edges[2];
1161 #define SE_LATCH 0
1162 #define SE_ORIG 1
1163   unsigned i, j, n;
1164   int is_latch = (latch == e->src);
1165   int scale_act = 0, *scale_step = NULL, scale_main = 0;
1166   int scale_after_exit = 0;
1167   int p, freq_in, freq_le, freq_out_orig;
1168   int prob_pass_thru, prob_pass_wont_exit, prob_pass_main;
1169   int add_irreducible_flag;
1170   basic_block place_after;
1171   bitmap bbs_to_scale = NULL;
1172   bitmap_iterator bi;
1173 
1174   gcc_assert (e->dest == loop->header);
1175   gcc_assert (ndupl > 0);
1176 
1177   if (orig)
1178     {
1179       /* Orig must be edge out of the loop.  */
1180       gcc_assert (flow_bb_inside_loop_p (loop, orig->src));
1181       gcc_assert (!flow_bb_inside_loop_p (loop, orig->dest));
1182     }
1183 
1184   n = loop->num_nodes;
1185   bbs = get_loop_body_in_dom_order (loop);
1186   gcc_assert (bbs[0] == loop->header);
1187   gcc_assert (bbs[n  - 1] == loop->latch);
1188 
1189   /* Check whether duplication is possible.  */
1190   if (!can_copy_bbs_p (bbs, loop->num_nodes))
1191     {
1192       free (bbs);
1193       return false;
1194     }
1195   new_bbs = XNEWVEC (basic_block, loop->num_nodes);
1196 
1197   /* In case we are doing loop peeling and the loop is in the middle of
1198      an irreducible region, the peeled copies will be inside it too.  */
1199   add_irreducible_flag = e->flags & EDGE_IRREDUCIBLE_LOOP;
1200   gcc_assert (!is_latch || !add_irreducible_flag);
1201 
1202   /* Find edge from latch.  */
1203   latch_edge = loop_latch_edge (loop);
1204 
1205   if (flags & DLTHE_FLAG_UPDATE_FREQ)
1206     {
1207       /* Calculate the coefficients by which we have to scale the frequencies
1208 	 of duplicated loop bodies.  */
1209       freq_in = header->frequency;
1210       freq_le = EDGE_FREQUENCY (latch_edge);
1211       if (freq_in == 0)
1212 	freq_in = 1;
1213       if (freq_in < freq_le)
1214 	freq_in = freq_le;
1215       freq_out_orig = orig ? EDGE_FREQUENCY (orig) : freq_in - freq_le;
1216       if (freq_out_orig > freq_in - freq_le)
1217 	freq_out_orig = freq_in - freq_le;
1218       prob_pass_thru = RDIV (REG_BR_PROB_BASE * freq_le, freq_in);
1219       prob_pass_wont_exit =
1220 	      RDIV (REG_BR_PROB_BASE * (freq_le + freq_out_orig), freq_in);
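      /* For example, if the header frequency is 100 and the latch edge
	 frequency is 90, prob_pass_thru becomes 90% of REG_BR_PROB_BASE:
	 a copy that may exit passes control on to the following copy with
	 that probability.  */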
1221 
1222       if (orig
1223 	  && REG_BR_PROB_BASE - orig->probability != 0)
1224 	{
1225 	  /* The blocks that are dominated by a removed exit edge ORIG have
1226 	     frequencies scaled by this.  */
1227 	  scale_after_exit
1228               = GCOV_COMPUTE_SCALE (REG_BR_PROB_BASE,
1229                                     REG_BR_PROB_BASE - orig->probability);
1230 	  bbs_to_scale = BITMAP_ALLOC (NULL);
1231 	  for (i = 0; i < n; i++)
1232 	    {
1233 	      if (bbs[i] != orig->src
1234 		  && dominated_by_p (CDI_DOMINATORS, bbs[i], orig->src))
1235 		bitmap_set_bit (bbs_to_scale, i);
1236 	    }
1237 	}
1238 
1239       scale_step = XNEWVEC (int, ndupl);
1240 
1241       for (i = 1; i <= ndupl; i++)
1242 	scale_step[i - 1] = bitmap_bit_p (wont_exit, i)
1243 				? prob_pass_wont_exit
1244 				: prob_pass_thru;
1245 
1246       /* Complete peeling is special as the probability of exit in last
1247 	 copy becomes 1.  */
1248       if (flags & DLTHE_FLAG_COMPLETTE_PEEL)
1249 	{
1250 	  int wanted_freq = EDGE_FREQUENCY (e);
1251 
1252 	  if (wanted_freq > freq_in)
1253 	    wanted_freq = freq_in;
1254 
1255 	  gcc_assert (!is_latch);
1256 	  /* The first copy has the frequency of the incoming edge.  Each subsequent
1257 	     frequency should be reduced by prob_pass_wont_exit.  The caller
1258 	     should have managed the flags so that all copies except for the
1259 	     original loop have their WONT_EXIT bit set.  */
1260 	  scale_act = GCOV_COMPUTE_SCALE (wanted_freq, freq_in);
1261 	  /* Now simulate the duplication adjustments and compute header
1262 	     frequency of the last copy.  */
1263 	  for (i = 0; i < ndupl; i++)
1264 	    wanted_freq = combine_probabilities (wanted_freq, scale_step[i]);
1265 	  scale_main = GCOV_COMPUTE_SCALE (wanted_freq, freq_in);
1266 	}
1267       else if (is_latch)
1268 	{
1269 	  prob_pass_main = bitmap_bit_p (wont_exit, 0)
1270 				? prob_pass_wont_exit
1271 				: prob_pass_thru;
1272 	  p = prob_pass_main;
1273 	  scale_main = REG_BR_PROB_BASE;
1274 	  for (i = 0; i < ndupl; i++)
1275 	    {
1276 	      scale_main += p;
1277 	      p = combine_probabilities (p, scale_step[i]);
1278 	    }
1279 	  scale_main = GCOV_COMPUTE_SCALE (REG_BR_PROB_BASE, scale_main);
1280 	  scale_act = combine_probabilities (scale_main, prob_pass_main);
1281 	}
1282       else
1283 	{
1284 	  int preheader_freq = EDGE_FREQUENCY (e);
1285 	  scale_main = REG_BR_PROB_BASE;
1286 	  for (i = 0; i < ndupl; i++)
1287 	    scale_main = combine_probabilities (scale_main, scale_step[i]);
1288 	  if (preheader_freq > freq_in)
1289 	    preheader_freq = freq_in;
1290 	  scale_act = GCOV_COMPUTE_SCALE (preheader_freq, freq_in);
1291 	}
1292       for (i = 0; i < ndupl; i++)
1293 	gcc_assert (scale_step[i] >= 0 && scale_step[i] <= REG_BR_PROB_BASE);
1294       gcc_assert (scale_main >= 0 && scale_main <= REG_BR_PROB_BASE
1295 		  && scale_act >= 0  && scale_act <= REG_BR_PROB_BASE);
1296     }
1297 
1298   /* Loop the new bbs will belong to.  */
1299   target = e->src->loop_father;
1300 
1301   /* Original loops.  */
1302   n_orig_loops = 0;
1303   for (aloop = loop->inner; aloop; aloop = aloop->next)
1304     n_orig_loops++;
1305   orig_loops = XNEWVEC (struct loop *, n_orig_loops);
1306   for (aloop = loop->inner, i = 0; aloop; aloop = aloop->next, i++)
1307     orig_loops[i] = aloop;
1308 
1309   set_loop_copy (loop, target);
1310 
1311   first_active = XNEWVEC (basic_block, n);
1312   if (is_latch)
1313     {
1314       memcpy (first_active, bbs, n * sizeof (basic_block));
1315       first_active_latch = latch;
1316     }
1317 
1318   spec_edges[SE_ORIG] = orig;
1319   spec_edges[SE_LATCH] = latch_edge;
1320 
1321   place_after = e->src;
1322   for (j = 0; j < ndupl; j++)
1323     {
1324       /* Copy loops.  */
1325       copy_loops_to (orig_loops, n_orig_loops, target);
1326 
1327       /* Copy bbs.  */
1328       copy_bbs (bbs, n, new_bbs, spec_edges, 2, new_spec_edges, loop,
1329 		place_after, true);
1330       place_after = new_spec_edges[SE_LATCH]->src;
1331 
1332       if (flags & DLTHE_RECORD_COPY_NUMBER)
1333 	for (i = 0; i < n; i++)
1334 	  {
1335 	    gcc_assert (!new_bbs[i]->aux);
1336 	    new_bbs[i]->aux = (void *)(size_t)(j + 1);
1337 	  }
1338 
1339       /* Note whether the blocks and edges belong to an irreducible loop.  */
1340       if (add_irreducible_flag)
1341 	{
1342 	  for (i = 0; i < n; i++)
1343 	    new_bbs[i]->flags |= BB_DUPLICATED;
1344 	  for (i = 0; i < n; i++)
1345 	    {
1346 	      edge_iterator ei;
1347 	      new_bb = new_bbs[i];
1348 	      if (new_bb->loop_father == target)
1349 		new_bb->flags |= BB_IRREDUCIBLE_LOOP;
1350 
1351 	      FOR_EACH_EDGE (ae, ei, new_bb->succs)
1352 		if ((ae->dest->flags & BB_DUPLICATED)
1353 		    && (ae->src->loop_father == target
1354 			|| ae->dest->loop_father == target))
1355 		  ae->flags |= EDGE_IRREDUCIBLE_LOOP;
1356 	    }
1357 	  for (i = 0; i < n; i++)
1358 	    new_bbs[i]->flags &= ~BB_DUPLICATED;
1359 	}
1360 
1361       /* Redirect the special edges.  */
1362       if (is_latch)
1363 	{
1364 	  redirect_edge_and_branch_force (latch_edge, new_bbs[0]);
1365 	  redirect_edge_and_branch_force (new_spec_edges[SE_LATCH],
1366 					  loop->header);
1367 	  set_immediate_dominator (CDI_DOMINATORS, new_bbs[0], latch);
1368 	  latch = loop->latch = new_bbs[n - 1];
1369 	  e = latch_edge = new_spec_edges[SE_LATCH];
1370 	}
1371       else
1372 	{
1373 	  redirect_edge_and_branch_force (new_spec_edges[SE_LATCH],
1374 					  loop->header);
1375 	  redirect_edge_and_branch_force (e, new_bbs[0]);
1376 	  set_immediate_dominator (CDI_DOMINATORS, new_bbs[0], e->src);
1377 	  e = new_spec_edges[SE_LATCH];
1378 	}
1379 
1380       /* Record exit edge in this copy.  */
1381       if (orig && bitmap_bit_p (wont_exit, j + 1))
1382 	{
1383 	  if (to_remove)
1384 	    to_remove->safe_push (new_spec_edges[SE_ORIG]);
1385 	  set_zero_probability (new_spec_edges[SE_ORIG]);
1386 
1387 	  /* Scale the frequencies of the blocks dominated by the exit.  */
1388 	  if (bbs_to_scale)
1389 	    {
1390 	      EXECUTE_IF_SET_IN_BITMAP (bbs_to_scale, 0, i, bi)
1391 		{
1392 		  scale_bbs_frequencies_int (new_bbs + i, 1, scale_after_exit,
1393 					     REG_BR_PROB_BASE);
1394 		}
1395 	    }
1396 	}
1397 
1398       /* Record the first copy in the control flow order if it is not
1399 	 the original loop (i.e. in case of peeling).  */
1400       if (!first_active_latch)
1401 	{
1402 	  memcpy (first_active, new_bbs, n * sizeof (basic_block));
1403 	  first_active_latch = new_bbs[n - 1];
1404 	}
1405 
1406       /* Set counts and frequencies.  */
1407       if (flags & DLTHE_FLAG_UPDATE_FREQ)
1408 	{
1409 	  scale_bbs_frequencies_int (new_bbs, n, scale_act, REG_BR_PROB_BASE);
1410 	  scale_act = combine_probabilities (scale_act, scale_step[j]);
1411 	}
1412     }
1413   free (new_bbs);
1414   free (orig_loops);
1415 
1416   /* Record the exit edge in the original loop body, and update the frequencies.  */
1417   if (orig && bitmap_bit_p (wont_exit, 0))
1418     {
1419       if (to_remove)
1420 	to_remove->safe_push (orig);
1421       set_zero_probability (orig);
1422 
1423       /* Scale the frequencies of the blocks dominated by the exit.  */
1424       if (bbs_to_scale)
1425 	{
1426 	  EXECUTE_IF_SET_IN_BITMAP (bbs_to_scale, 0, i, bi)
1427 	    {
1428 	      scale_bbs_frequencies_int (bbs + i, 1, scale_after_exit,
1429 					 REG_BR_PROB_BASE);
1430 	    }
1431 	}
1432     }
1433 
1434   /* Update the original loop.  */
1435   if (!is_latch)
1436     set_immediate_dominator (CDI_DOMINATORS, e->dest, e->src);
1437   if (flags & DLTHE_FLAG_UPDATE_FREQ)
1438     {
1439       scale_bbs_frequencies_int (bbs, n, scale_main, REG_BR_PROB_BASE);
1440       free (scale_step);
1441     }
1442 
1443   /* Update dominators of outer blocks if affected.  */
1444   for (i = 0; i < n; i++)
1445     {
1446       basic_block dominated, dom_bb;
1447       vec<basic_block> dom_bbs;
1448       unsigned j;
1449 
1450       bb = bbs[i];
1451       bb->aux = 0;
1452 
1453       dom_bbs = get_dominated_by (CDI_DOMINATORS, bb);
1454       FOR_EACH_VEC_ELT (dom_bbs, j, dominated)
1455 	{
1456 	  if (flow_bb_inside_loop_p (loop, dominated))
1457 	    continue;
1458 	  dom_bb = nearest_common_dominator (
1459 			CDI_DOMINATORS, first_active[i], first_active_latch);
1460 	  set_immediate_dominator (CDI_DOMINATORS, dominated, dom_bb);
1461 	}
1462       dom_bbs.release ();
1463     }
1464   free (first_active);
1465 
1466   free (bbs);
1467   BITMAP_FREE (bbs_to_scale);
1468 
1469   return true;
1470 }
1471 
1472 /* A callback for make_forwarder_block, to redirect all edges except for
1473    MFB_KJ_EDGE to the entry part.  E is the edge for which we should decide
1474    whether to redirect it.  */
1475 
1476 edge mfb_kj_edge;
1477 bool
1478 mfb_keep_just (edge e)
1479 {
1480   return e != mfb_kj_edge;
1481 }
1482 
1483 /* True when a candidate preheader BLOCK has predecessors from LOOP.  */
1484 
1485 static bool
1486 has_preds_from_loop (basic_block block, struct loop *loop)
1487 {
1488   edge e;
1489   edge_iterator ei;
1490 
1491   FOR_EACH_EDGE (e, ei, block->preds)
1492     if (e->src->loop_father == loop)
1493       return true;
1494   return false;
1495 }
1496 
1497 /* Creates a preheader for LOOP.  Returns the newly created block, or NULL if
1498    no new block was needed.  Unless CP_SIMPLE_PREHEADERS is set in FLAGS, we
1499    only force LOOP to have a single entry; otherwise we also force the
1500    preheader block to have only one successor.  When CP_FALLTHRU_PREHEADERS is
1501    set in FLAGS, we force the preheader block to be a fallthru predecessor of
1502    the loop header and to have only predecessors from outside of the loop.
1503    The function also updates dominators.  */
1504 
1505 basic_block
1506 create_preheader (struct loop *loop, int flags)
1507 {
1508   edge e;
1509   basic_block dummy;
1510   int nentry = 0;
1511   bool irred = false;
1512   bool latch_edge_was_fallthru;
1513   edge one_succ_pred = NULL, single_entry = NULL;
1514   edge_iterator ei;
1515 
1516   FOR_EACH_EDGE (e, ei, loop->header->preds)
1517     {
1518       if (e->src == loop->latch)
1519 	continue;
1520       irred |= (e->flags & EDGE_IRREDUCIBLE_LOOP) != 0;
1521       nentry++;
1522       single_entry = e;
1523       if (single_succ_p (e->src))
1524 	one_succ_pred = e;
1525     }
1526   gcc_assert (nentry);
1527   if (nentry == 1)
1528     {
1529       bool need_forwarder_block = false;
1530 
1531       /* We do not allow entry block to be the loop preheader, since we
1532 	     cannot emit code there.  */
1533       if (single_entry->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
1534         need_forwarder_block = true;
1535       else
1536         {
1537           /* If we want simple preheaders, also force the preheader to have
1538              just a single successor.  */
1539           if ((flags & CP_SIMPLE_PREHEADERS)
1540               && !single_succ_p (single_entry->src))
1541             need_forwarder_block = true;
1542           /* If we want fallthru preheaders, also create forwarder block when
1543              preheader ends with a jump or has predecessors from loop.  */
1544           else if ((flags & CP_FALLTHRU_PREHEADERS)
1545                    && (JUMP_P (BB_END (single_entry->src))
1546                        || has_preds_from_loop (single_entry->src, loop)))
1547             need_forwarder_block = true;
1548         }
1549       if (! need_forwarder_block)
1550 	return NULL;
1551     }
1552 
1553   mfb_kj_edge = loop_latch_edge (loop);
1554   latch_edge_was_fallthru = (mfb_kj_edge->flags & EDGE_FALLTHRU) != 0;
1555   if (nentry == 1)
1556     dummy = split_edge (single_entry);
1557   else
1558     {
1559       edge fallthru = make_forwarder_block (loop->header, mfb_keep_just, NULL);
1560       dummy = fallthru->src;
1561       loop->header = fallthru->dest;
1562     }
1563 
1564   /* Try to be clever in placing the newly created preheader.  The idea is to
1565      avoid breaking any "fallthruness" relationship between blocks.
1566 
1567      The preheader was created just before the header and all incoming edges
1568      to the header were redirected to the preheader, except the latch edge.
1569      So the only problematic case is when this latch edge was a fallthru
1570      edge: it is not anymore after the preheader creation so we have broken
1571      the fallthruness.  We're therefore going to look for a better place.  */
1572   if (latch_edge_was_fallthru)
1573     {
1574       if (one_succ_pred)
1575 	e = one_succ_pred;
1576       else
1577 	e = EDGE_PRED (dummy, 0);
1578 
1579       move_block_after (dummy, e->src);
1580     }
1581 
1582   if (irred)
1583     {
1584       dummy->flags |= BB_IRREDUCIBLE_LOOP;
1585       single_succ_edge (dummy)->flags |= EDGE_IRREDUCIBLE_LOOP;
1586     }
1587 
1588   if (dump_file)
1589     fprintf (dump_file, "Created preheader block for loop %i\n",
1590 	     loop->num);
1591 
1592   if (flags & CP_FALLTHRU_PREHEADERS)
1593     gcc_assert ((single_succ_edge (dummy)->flags & EDGE_FALLTHRU)
1594                 && !JUMP_P (BB_END (dummy)));
1595 
1596   return dummy;
1597 }
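
/* A minimal usage sketch for the function above (an illustration added
   here, not part of the original code): a pass that needs simple
   preheaders walks the loop tree and calls create_preheader for every
   loop, e.g.

     struct loop *loop;
     FOR_EACH_LOOP (loop, 0)
       {
         basic_block ph = create_preheader (loop, CP_SIMPLE_PREHEADERS);
         if (ph && dump_file)
           fprintf (dump_file, "new preheader is bb %i\n", ph->index);
       }

   This is essentially what create_preheaders below does (minus the
   extra dump); a NULL return means the existing single entry block
   already qualified as a preheader.  */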
1598 
1599 /* Create preheaders for each loop; for the meaning of FLAGS see create_preheader.  */
1600 
1601 void
1602 create_preheaders (int flags)
1603 {
1604   struct loop *loop;
1605 
1606   if (!current_loops)
1607     return;
1608 
1609   FOR_EACH_LOOP (loop, 0)
1610     create_preheader (loop, flags);
1611   loops_state_set (LOOPS_HAVE_PREHEADERS);
1612 }
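
/* A sketch of the usual way the function above is reached (an
   illustration; this file does not show the callers): passes normally
   request preheaders through the loop optimizer initialization flags
   rather than calling it directly, e.g.

     loop_optimizer_init (LOOPS_NORMAL);

   which, among other loop-structure properties, asks for preheaders
   and ends up calling create_preheaders with CP_SIMPLE_PREHEADERS.  */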
1613 
1614 /* Forces all loop latches to have only a single successor.  */
1615 
1616 void
1617 force_single_succ_latches (void)
1618 {
1619   struct loop *loop;
1620   edge e;
1621 
1622   FOR_EACH_LOOP (loop, 0)
1623     {
1624       if (loop->latch != loop->header && single_succ_p (loop->latch))
1625 	continue;
1626 
1627       e = find_edge (loop->latch, loop->header);
1628       gcc_checking_assert (e != NULL);
1629 
1630       split_edge (e);
1631     }
1632   loops_state_set (LOOPS_HAVE_SIMPLE_LATCHES);
1633 }
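
/* A before/after sketch of the transformation above (an illustration,
   not part of the original code).  If a latch block L also has an edge
   to an exit block E, its latch->header edge is split:

       L --> header                 L --> L' --> header
       L --> E              ==>     L --> E

   so after the pass the jump back to each loop header comes from a
   block (here L') whose only successor is that header.  */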
1634 
1635 /* This function is called from loop_version.  It splits the entry edge
1636    of the loop we want to version, adds the versioning condition, and
1637    adjusts the edges to the two versions of the loop appropriately.
1638    E is the incoming edge.  Returns the basic block containing the
1639    condition.
1640 
1641    --- edge e ---- > [second_head]
1642 
1643    Split it and insert new conditional expression and adjust edges.
1644 
1645     --- edge e ---> [cond expr] ---> [first_head]
1646 			|
1647 			+---------> [second_head]
1648 
1649   THEN_PROB is the probability of the then branch of the condition.
1650   ELSE_PROB is the probability of the else branch.  Note that both may be
1651   REG_BR_PROB_BASE when the condition is IFN_LOOP_VECTORIZED.  */
1652 
1653 static basic_block
1654 lv_adjust_loop_entry_edge (basic_block first_head, basic_block second_head,
1655 			   edge e, void *cond_expr, unsigned then_prob,
1656 			   unsigned else_prob)
1657 {
1658   basic_block new_head = NULL;
1659   edge e1;
1660 
1661   gcc_assert (e->dest == second_head);
1662 
1663   /* Split edge 'e'. This will create a new basic block, where we can
1664      insert conditional expr.  */
1665   new_head = split_edge (e);
1666 
1667   lv_add_condition_to_bb (first_head, second_head, new_head,
1668 			  cond_expr);
1669 
1670   /* Don't set EDGE_TRUE_VALUE in RTL mode, as it's invalid there.  */
1671   e = single_succ_edge (new_head);
1672   e1 = make_edge (new_head, first_head,
1673 		  current_ir_type () == IR_GIMPLE ? EDGE_TRUE_VALUE : 0);
1674   e1->probability = then_prob;
1675   e->probability = else_prob;
1676   e1->count = apply_probability (e->count, e1->probability);
1677   e->count = apply_probability (e->count, e->probability);
1678 
1679   set_immediate_dominator (CDI_DOMINATORS, first_head, new_head);
1680   set_immediate_dominator (CDI_DOMINATORS, second_head, new_head);
1681 
1682   /* Adjust loop header phi nodes.  */
1683   lv_adjust_loop_header_phi (first_head, second_head, new_head, e1);
1684 
1685   return new_head;
1686 }
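
/* A worked example of the count scaling in the function above (a
   sketch; it assumes the usual REG_BR_PROB_BASE of 10000 and that
   apply_probability computes COUNT * PROB / REG_BR_PROB_BASE).  With
   an incoming count of 1000, THEN_PROB = 9000 and ELSE_PROB = 1000:

     e1->count = 1000 * 9000 / 10000 = 900    (edge to FIRST_HEAD)
     e->count  = 1000 * 1000 / 10000 = 100    (edge to SECOND_HEAD)

   i.e. the execution count entering the condition block is split
   between the two loop versions in proportion to the branch
   probabilities.  */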
1687 
1688 /* Main entry point for Loop Versioning transformation.
1689 
1690    Given a condition and a loop, this transformation creates
1691      if (condition) { loop_copy1 } else { loop_copy2 },
1692    where loop_copy1 is the loop transformed in one way, and loop_copy2
1693    is the loop transformed in another way (or unchanged).  COND_EXPR
1694    may be a run time test for things that were not resolved by static
1695    analysis (overlapping ranges (anti-aliasing), alignment, etc.).
1696 
1697    If non-NULL, CONDITION_BB is set to the basic block containing the
1698    condition.
1699 
1700    THEN_PROB is the probability of the then edge of the if.  THEN_SCALE
1701    is the ratio by which the frequencies in the original loop should
1702    be scaled.  ELSE_SCALE is the ratio by which the frequencies in the
1703    new loop should be scaled.
1704 
1705    If PLACE_AFTER is true, we place the new loop after LOOP in the
1706    instruction stream, otherwise it is placed before LOOP.  */
1707 
1708 struct loop *
1709 loop_version (struct loop *loop,
1710 	      void *cond_expr, basic_block *condition_bb,
1711 	      unsigned then_prob, unsigned else_prob,
1712 	      unsigned then_scale, unsigned else_scale,
1713 	      bool place_after)
1714 {
1715   basic_block first_head, second_head;
1716   edge entry, latch_edge, true_edge, false_edge;
1717   int irred_flag;
1718   struct loop *nloop;
1719   basic_block cond_bb;
1720 
1721   /* Record the entry and latch edges for the loop.  */
1722   entry = loop_preheader_edge (loop);
1723   irred_flag = entry->flags & EDGE_IRREDUCIBLE_LOOP;
1724   entry->flags &= ~EDGE_IRREDUCIBLE_LOOP;
1725 
1726   /* Note down the head of the loop as first_head.  */
1727   first_head = entry->dest;
1728 
1729   /* Duplicate loop.  */
1730   if (!cfg_hook_duplicate_loop_to_header_edge (loop, entry, 1,
1731 					       NULL, NULL, NULL, 0))
1732     {
1733       entry->flags |= irred_flag;
1734       return NULL;
1735     }
1736 
1737   /* After duplication the entry edge now points to the new loop head block.
1738      Note down the new head as second_head.  */
1739   second_head = entry->dest;
1740 
1741   /* Split loop entry edge and insert new block with cond expr.  */
1742   cond_bb = lv_adjust_loop_entry_edge (first_head, second_head,
1743 					entry, cond_expr, then_prob, else_prob);
1744   if (condition_bb)
1745     *condition_bb = cond_bb;
1746 
1747   if (!cond_bb)
1748     {
1749       entry->flags |= irred_flag;
1750       return NULL;
1751     }
1752 
1753   latch_edge = single_succ_edge (get_bb_copy (loop->latch));
1754 
1755   extract_cond_bb_edges (cond_bb, &true_edge, &false_edge);
1756   nloop = loopify (latch_edge,
1757 		   single_pred_edge (get_bb_copy (loop->header)),
1758 		   cond_bb, true_edge, false_edge,
1759 		   false /* Do not redirect all edges.  */,
1760 		   then_scale, else_scale);
1761 
1762   copy_loop_info (loop, nloop);
1763 
1764   /* loopify redirected latch_edge.  Update its PENDING_STMTS.  */
1765   lv_flush_pending_stmts (latch_edge);
1766 
1767   /* loopify redirected condition_bb's succ edge.  Update its PENDING_STMTS.  */
1768   extract_cond_bb_edges (cond_bb, &true_edge, &false_edge);
1769   lv_flush_pending_stmts (false_edge);
1770   /* Adjust irreducible flag.  */
1771   if (irred_flag)
1772     {
1773       cond_bb->flags |= BB_IRREDUCIBLE_LOOP;
1774       loop_preheader_edge (loop)->flags |= EDGE_IRREDUCIBLE_LOOP;
1775       loop_preheader_edge (nloop)->flags |= EDGE_IRREDUCIBLE_LOOP;
1776       single_pred_edge (cond_bb)->flags |= EDGE_IRREDUCIBLE_LOOP;
1777     }
1778 
1779   if (place_after)
1780     {
1781       basic_block *bbs = get_loop_body_in_dom_order (nloop), after;
1782       unsigned i;
1783 
1784       after = loop->latch;
1785 
1786       for (i = 0; i < nloop->num_nodes; i++)
1787 	{
1788 	  move_block_after (bbs[i], after);
1789 	  after = bbs[i];
1790 	}
1791       free (bbs);
1792     }
1793 
1794   /* At this point condition_bb is the preheader of both loops, with two
1795      successors, first_head and second_head.  Make sure that each loop's
1796      preheader has only one successor.  */
1797   split_edge (loop_preheader_edge (loop));
1798   split_edge (loop_preheader_edge (nloop));
1799 
1800   return nloop;
1801 }
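
/* A minimal caller sketch for loop_version (an illustration only; real
   callers such as the vectorizer and loop unswitching choose the
   probabilities and scales from profile data, and COND below stands
   for whatever versioning condition they build):

     basic_block cond_bb;
     struct loop *nloop
       = loop_version (loop, cond, &cond_bb,
                       REG_BR_PROB_BASE / 2, REG_BR_PROB_BASE / 2,
                       REG_BR_PROB_BASE / 2, REG_BR_PROB_BASE / 2,
                       true);
     if (!nloop)
       return;

   The final TRUE places the copy after LOOP in the instruction stream.
   On success LOOP is guarded by the condition, NLOOP is the copy
   reached through the other arm of the branch, and both loops end up
   with their own single-successor preheaders thanks to the split_edge
   calls above.  */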
1802