/* Control flow graph analysis code for GNU compiler.
   Copyright (C) 1987-2022 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file contains various simple utilities to analyze the CFG.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "cfghooks.h"
#include "timevar.h"
#include "cfganal.h"
#include "cfgloop.h"
#include "diagnostic.h"

namespace {
/* Store the data structures necessary for depth-first search.  */
class depth_first_search
  {
public:
    depth_first_search ();

    basic_block execute (basic_block);
    void add_bb (basic_block);

private:
  /* stack for backtracking during the algorithm */
  auto_vec<basic_block, 20> m_stack;

  /* record of basic blocks already seen by depth-first search */
  auto_sbitmap m_visited_blocks;
};
}

/* Mark the back edges in DFS traversal.
   Return nonzero if a loop (natural or otherwise) is present.
   Inspired by Depth_First_Search_PP described in:

     Advanced Compiler Design and Implementation
     Steven Muchnick
     Morgan Kaufmann, 1997

   and heavily borrowed from pre_and_rev_post_order_compute.  */

bool
mark_dfs_back_edges (struct function *fun)
{
  int *pre;
  int *post;
  int prenum = 1;
  int postnum = 1;
  bool found = false;

  /* Allocate the preorder and postorder number arrays.  */
  pre = XCNEWVEC (int, last_basic_block_for_fn (fun));
  post = XCNEWVEC (int, last_basic_block_for_fn (fun));

  /* Allocate stack for back-tracking up CFG.  */
  auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (fun) + 1);

  /* Allocate bitmap to track nodes that have been visited.  */
  auto_sbitmap visited (last_basic_block_for_fn (fun));

  /* None of the nodes in the CFG have been visited yet.  */
  bitmap_clear (visited);

  /* Push the first edge on to the stack.  */
  stack.quick_push (ei_start (ENTRY_BLOCK_PTR_FOR_FN (fun)->succs));

  while (!stack.is_empty ())
    {
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      edge_iterator ei = stack.last ();
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;
      ei_edge (ei)->flags &= ~EDGE_DFS_BACK;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR_FOR_FN (fun) && ! bitmap_bit_p (visited,
								 dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  bitmap_set_bit (visited, dest->index);

	  pre[dest->index] = prenum++;
	  if (EDGE_COUNT (dest->succs) > 0)
	    {
	      /* Since the DEST node has been visited for the first
		 time, check its successors.  */
	      stack.quick_push (ei_start (dest->succs));
	    }
	  else
	    post[dest->index] = postnum++;
	}
      else
	{
	  if (dest != EXIT_BLOCK_PTR_FOR_FN (fun)
	      && src != ENTRY_BLOCK_PTR_FOR_FN (fun)
	      && pre[src->index] >= pre[dest->index]
	      && post[dest->index] == 0)
	    ei_edge (ei)->flags |= EDGE_DFS_BACK, found = true;

	  if (ei_one_before_end_p (ei)
	      && src != ENTRY_BLOCK_PTR_FOR_FN (fun))
	    post[src->index] = postnum++;

	  if (!ei_one_before_end_p (ei))
	    ei_next (&stack.last ());
	  else
	    stack.pop ();
	}
    }

  free (pre);
  free (post);

  return found;
}

bool
mark_dfs_back_edges (void)
{
  return mark_dfs_back_edges (cfun);
}

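/* Usage sketch (illustrative, not part of the original file): a pass
   that wants fresh EDGE_DFS_BACK flags on the current function can do

     if (mark_dfs_back_edges ())
       {
	 ... at least one retreating edge exists, so the CFG
	     contains a cycle ...
       }

   after which every edge E with E->flags & EDGE_DFS_BACK set is a
   back edge with respect to this DFS walk.  */
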
/* Verify that EDGE_DFS_BACK is up to date for FUN: save the current
   markings, recompute them, and report an internal error on any
   mismatch.  */

void
verify_marked_backedges (struct function *fun)
{
  auto_edge_flag saved_dfs_back (fun);
  basic_block bb;
  edge e;
  edge_iterator ei;

  // Save all the back edges...
  FOR_EACH_BB_FN (bb, fun)
    FOR_EACH_EDGE (e, ei, bb->succs)
      {
	if (e->flags & EDGE_DFS_BACK)
	  {
	    e->flags |= saved_dfs_back;
	    e->flags &= ~EDGE_DFS_BACK;
	  }
      }

  // ...and verify that recalculating them agrees with the saved ones.
  mark_dfs_back_edges ();
  FOR_EACH_BB_FN (bb, fun)
    FOR_EACH_EDGE (e, ei, bb->succs)
      {
	if (((e->flags & EDGE_DFS_BACK) != 0)
	    != ((e->flags & saved_dfs_back) != 0))
	  internal_error ("%<verify_marked_backedges%> failed");

	e->flags &= ~saved_dfs_back;
      }
}

/* Find unreachable blocks.  An unreachable block will have 0 in
   the reachable bit in block->flags.  A nonzero value indicates the
   block is reachable.  */

void
find_unreachable_blocks (void)
{
  edge e;
  edge_iterator ei;
  basic_block *tos, *worklist, bb;

  tos = worklist = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));

  /* Clear all the reachability flags.  */

  FOR_EACH_BB_FN (bb, cfun)
    bb->flags &= ~BB_REACHABLE;

  /* Add our starting points to the worklist.  Almost always there will
     be only one.  It isn't inconceivable that we might one day directly
     support Fortran alternate entry points.  */

  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs)
    {
      *tos++ = e->dest;

      /* Mark the block reachable.  */
      e->dest->flags |= BB_REACHABLE;
    }

  /* Iterate: find everything reachable from what we've already seen.  */

  while (tos != worklist)
    {
      basic_block b = *--tos;

      FOR_EACH_EDGE (e, ei, b->succs)
	{
	  basic_block dest = e->dest;

	  if (!(dest->flags & BB_REACHABLE))
	    {
	      *tos++ = dest;
	      dest->flags |= BB_REACHABLE;
	    }
	}
    }

  free (worklist);
}

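/* Usage sketch (illustrative): after a call to find_unreachable_blocks
   a caller can act on the blocks whose BB_REACHABLE flag is clear, e.g.

     find_unreachable_blocks ();
     FOR_EACH_BB_FN (bb, cfun)
       if (!(bb->flags & BB_REACHABLE))
	 ... this block cannot be reached from ENTRY ...  */
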
/* Verify that there are no unreachable blocks in the current function.  */

void
verify_no_unreachable_blocks (void)
{
  find_unreachable_blocks ();

  basic_block bb;
  FOR_EACH_BB_FN (bb, cfun)
    gcc_assert ((bb->flags & BB_REACHABLE) != 0);
}


/* Functions to access an edge list with a vector representation.
   Enough data is kept such that given an index number, the
   pred and succ that edge represents can be determined, or
   given a pred and a succ, its index number can be returned.
   This allows algorithms which consume a lot of memory to
   represent the normally full matrix of edge (pred,succ) with a
   single indexed vector, edge (EDGE_INDEX (pred, succ)), with no
   wasted space in the client code due to sparse flow graphs.  */

/* This function initializes the edge list.  Basically the entire
   flowgraph is processed, all edges are assigned a number,
   and the data structure is filled in.  */

struct edge_list *
create_edge_list (void)
{
  struct edge_list *elist;
  edge e;
  int num_edges;
  basic_block bb;
  edge_iterator ei;

  /* Determine the number of edges in the flow graph by counting successor
     edges on each basic block.  */
  num_edges = 0;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    {
      num_edges += EDGE_COUNT (bb->succs);
    }

  elist = XNEW (struct edge_list);
  elist->num_edges = num_edges;
  elist->index_to_edge = XNEWVEC (edge, num_edges);

  num_edges = 0;

  /* Follow successors of blocks, and register these edges.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    FOR_EACH_EDGE (e, ei, bb->succs)
      elist->index_to_edge[num_edges++] = e;

  return elist;
}

/* This function frees the memory associated with an edge list.  */

void
free_edge_list (struct edge_list *elist)
{
  if (elist)
    {
      free (elist->index_to_edge);
      free (elist);
    }
}

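/* Usage sketch (illustrative): the intended lifecycle is to build the
   list, query it by index or by block pair, and free it, e.g.

     struct edge_list *el = create_edge_list ();
     int ix = EDGE_INDEX (el, bb1, bb2);
     if (ix != EDGE_INDEX_NO_EDGE)
       ... INDEX_EDGE (el, ix) is the edge from bb1 to bb2 ...
     free_edge_list (el);

   where bb1 and bb2 stand for two basic blocks of interest.  */
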
/* This function provides debug output showing an edge list.  */

DEBUG_FUNCTION void
print_edge_list (FILE *f, struct edge_list *elist)
{
  int x;

  fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
	   n_basic_blocks_for_fn (cfun), elist->num_edges);

  for (x = 0; x < elist->num_edges; x++)
    {
      fprintf (f, " %-4d - edge(", x);
      if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	fprintf (f, "entry,");
      else
	fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);

      if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR_FOR_FN (cfun))
	fprintf (f, "exit)\n");
      else
	fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
    }
}

/* This function provides an internal consistency check of an edge list,
   verifying that all edges are present, and that there are no
   extra edges.  */

DEBUG_FUNCTION void
verify_edge_list (FILE *f, struct edge_list *elist)
{
  int pred, succ, index;
  edge e;
  basic_block bb, p, s;
  edge_iterator ei;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  pred = e->src->index;
	  succ = e->dest->index;
	  index = EDGE_INDEX (elist, e->src, e->dest);
	  if (index == EDGE_INDEX_NO_EDGE)
	    {
	      fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
	      continue;
	    }

	  if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
	    fprintf (f, "*p* Pred for index %d should be %d not %d\n",
		     index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
	  if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
	    fprintf (f, "*p* Succ for index %d should be %d not %d\n",
		     index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
	}
    }

  /* We've verified that all the edges are in the list, now let's make sure
     there are no spurious edges in the list.  This is an expensive check!  */

  FOR_BB_BETWEEN (p, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    FOR_BB_BETWEEN (s, ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, NULL, next_bb)
      {
	int found_edge = 0;

	FOR_EACH_EDGE (e, ei, p->succs)
	  if (e->dest == s)
	    {
	      found_edge = 1;
	      break;
	    }

	FOR_EACH_EDGE (e, ei, s->preds)
	  if (e->src == p)
	    {
	      found_edge = 1;
	      break;
	    }

	if (EDGE_INDEX (elist, p, s)
	    == EDGE_INDEX_NO_EDGE && found_edge != 0)
	  fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
		   p->index, s->index);
	if (EDGE_INDEX (elist, p, s)
	    != EDGE_INDEX_NO_EDGE && found_edge == 0)
	  fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
		   p->index, s->index, EDGE_INDEX (elist, p, s));
      }
}


/* Functions to compute control dependences.  */

/* Indicate block BB is control dependent on an edge with index EDGE_INDEX.  */
void
control_dependences::set_control_dependence_map_bit (basic_block bb,
						     int edge_index)
{
  if (bb == ENTRY_BLOCK_PTR_FOR_FN (cfun))
    return;
  gcc_assert (bb != EXIT_BLOCK_PTR_FOR_FN (cfun));
  bitmap_set_bit (&control_dependence_map[bb->index], edge_index);
}

/* Clear all control dependences for block BB.  */
void
control_dependences::clear_control_dependence_bitmap (basic_block bb)
{
  bitmap_clear (&control_dependence_map[bb->index]);
}

/* Determine all blocks' control dependences on the edge with index
   EDGE_INDEX in the edge list, ala Morgan, Section 3.6.  */

void
control_dependences::find_control_dependence (int edge_index)
{
  basic_block current_block;
  basic_block ending_block;

  gcc_assert (get_edge_src (edge_index) != EXIT_BLOCK_PTR_FOR_FN (cfun));

  ending_block = get_immediate_dominator (CDI_POST_DOMINATORS,
					  get_edge_src (edge_index));

  for (current_block = get_edge_dest (edge_index);
       current_block != ending_block
       && current_block != EXIT_BLOCK_PTR_FOR_FN (cfun);
       current_block = get_immediate_dominator (CDI_POST_DOMINATORS,
						current_block))
    set_control_dependence_map_bit (current_block, edge_index);
}

/* Record all blocks' control dependences on all edges in the edge
   list, ala Morgan, Section 3.6.  */

control_dependences::control_dependences ()
{
  timevar_push (TV_CONTROL_DEPENDENCES);

  /* Initialize the edge list.  */
  int num_edges = 0;
  basic_block bb;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    num_edges += EDGE_COUNT (bb->succs);
  m_el.create (num_edges);
  edge e;
  edge_iterator ei;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    FOR_EACH_EDGE (e, ei, bb->succs)
      {
	/* For abnormal edges, we don't make current_block control
	   dependent because instructions that throw are always necessary
	   anyway.  */
	if (e->flags & EDGE_ABNORMAL)
	  {
	    num_edges--;
	    continue;
	  }
	m_el.quick_push (std::make_pair (e->src->index, e->dest->index));
      }

  bitmap_obstack_initialize (&m_bitmaps);
  control_dependence_map.create (last_basic_block_for_fn (cfun));
  control_dependence_map.quick_grow (last_basic_block_for_fn (cfun));
  for (int i = 0; i < last_basic_block_for_fn (cfun); ++i)
    bitmap_initialize (&control_dependence_map[i], &m_bitmaps);
  for (int i = 0; i < num_edges; ++i)
    find_control_dependence (i);

  timevar_pop (TV_CONTROL_DEPENDENCES);
}

/* Free control dependences and the associated edge list.  */

control_dependences::~control_dependences ()
{
  control_dependence_map.release ();
  m_el.release ();
  bitmap_obstack_release (&m_bitmaps);
}

/* Returns the bitmap of edges the basic-block I is dependent on.  */

bitmap
control_dependences::get_edges_dependent_on (int i)
{
  return &control_dependence_map[i];
}

/* Returns the edge source with index I from the edge list.  */

basic_block
control_dependences::get_edge_src (int i)
{
  return BASIC_BLOCK_FOR_FN (cfun, m_el[i].first);
}

/* Returns the edge destination with index I from the edge list.  */

basic_block
control_dependences::get_edge_dest (int i)
{
  return BASIC_BLOCK_FOR_FN (cfun, m_el[i].second);
}
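
/* Usage sketch (illustrative): the class assumes post-dominance
   information is available, so a typical client looks like

     calculate_dominance_info (CDI_POST_DOMINATORS);
     control_dependences *cd = new control_dependences ();
     bitmap deps = cd->get_edges_dependent_on (bb->index);
     ... bit I set in DEPS means BB is control dependent on the edge
	 from cd->get_edge_src (I) to cd->get_edge_dest (I) ...
     delete cd;
     free_dominance_info (CDI_POST_DOMINATORS);  */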

/* Given PRED and SUCC blocks, return the edge which connects the blocks.
   If no such edge exists, return NULL.  */

edge
find_edge (basic_block pred, basic_block succ)
{
  edge e;
  edge_iterator ei;

  if (EDGE_COUNT (pred->succs) <= EDGE_COUNT (succ->preds))
    {
      FOR_EACH_EDGE (e, ei, pred->succs)
	if (e->dest == succ)
	  return e;
    }
  else
    {
      FOR_EACH_EDGE (e, ei, succ->preds)
	if (e->src == pred)
	  return e;
    }

  return NULL;
}

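/* Usage sketch (illustrative): find_edge is the usual way to test
   whether one block branches to another, e.g.

     edge e = find_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun));
     if (e && (e->flags & EDGE_FALLTHRU))
       ... bb falls through to the exit block ...

   The walk picks the shorter of the two edge vectors, so the cost is
   proportional to min (|succs|, |preds|).  */
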
/* This routine will determine what, if any, edge there is between
   a specified predecessor and successor.  */

int
find_edge_index (struct edge_list *edge_list, basic_block pred, basic_block succ)
{
  int x;

  for (x = 0; x < NUM_EDGES (edge_list); x++)
    if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
	&& INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
      return x;

  return (EDGE_INDEX_NO_EDGE);
}

/* This routine will remove any fake predecessor edges for a basic block.
   When the edge is removed, it is also removed from whatever successor
   list it is in.  */

static void
remove_fake_predecessors (basic_block bb)
{
  edge e;
  edge_iterator ei;

  for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
    {
      if ((e->flags & EDGE_FAKE) == EDGE_FAKE)
	remove_edge (e);
      else
	ei_next (&ei);
    }
}

/* This routine will remove all fake edges from the flow graph.  If
   we remove all fake successors, it will automatically remove all
   fake predecessors.  */

void
remove_fake_edges (void)
{
  basic_block bb;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb, NULL, next_bb)
    remove_fake_predecessors (bb);
}

/* This routine will remove all fake edges to the EXIT_BLOCK.  */

void
remove_fake_exit_edges (void)
{
  remove_fake_predecessors (EXIT_BLOCK_PTR_FOR_FN (cfun));
}


/* This function will add a fake edge between any block which has no
   successors and the exit block.  Some data flow equations require these
   edges to exist.  */

void
add_noreturn_fake_exit_edges (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    if (EDGE_COUNT (bb->succs) == 0)
      make_single_succ_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), EDGE_FAKE);
}

/* This function adds fake edges from noreturn blocks and infinite
   loops to the exit block.  Some optimizations require a path
   from each node to the exit node.

   See also Morgan, Figure 3.10, pp. 82-83.

   The current implementation is ugly, not attempting to minimize the
   number of inserted fake edges.  To reduce the number of fake edges
   to insert, add fake edges from _innermost_ loops containing only
   nodes not reachable from the exit block.  */

void
connect_infinite_loops_to_exit (void)
{
  /* First add fake exits to noreturn blocks, this is required to
     discover only truly infinite loops below.  */
  add_noreturn_fake_exit_edges ();

  /* Perform depth-first search in the reverse graph to find nodes
     reachable from the exit block.  */
  depth_first_search dfs;
  dfs.add_bb (EXIT_BLOCK_PTR_FOR_FN (cfun));

  /* Repeatedly add fake edges, updating the unreachable nodes.  */
  basic_block unvisited_block = EXIT_BLOCK_PTR_FOR_FN (cfun);
  while (1)
    {
      unvisited_block = dfs.execute (unvisited_block);
      if (!unvisited_block)
	break;

      basic_block deadend_block = dfs_find_deadend (unvisited_block);
      edge e = make_edge (deadend_block, EXIT_BLOCK_PTR_FOR_FN (cfun),
			  EDGE_FAKE);
      e->probability = profile_probability::never ();
      dfs.add_bb (deadend_block);
    }
}

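/* Usage sketch (illustrative): passes that need every block to reach
   EXIT typically bracket their work with this function and
   remove_fake_exit_edges, e.g.

     connect_infinite_loops_to_exit ();
     calculate_dominance_info (CDI_POST_DOMINATORS);
     ... use post-dominators, which are now well defined ...
     free_dominance_info (CDI_POST_DOMINATORS);
     remove_fake_exit_edges ();  */
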
/* Compute reverse top sort order.  This is computing a post order
   numbering of the graph.  If INCLUDE_ENTRY_EXIT is true, then
   ENTRY_BLOCK and EXIT_BLOCK are included.  If DELETE_UNREACHABLE is
   true, unreachable blocks are deleted.  */

int
post_order_compute (int *post_order, bool include_entry_exit,
		    bool delete_unreachable)
{
  int post_order_num = 0;
  int count;

  if (include_entry_exit)
    post_order[post_order_num++] = EXIT_BLOCK;

  /* Allocate stack for back-tracking up CFG.  */
  auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (cfun) + 1);

  /* Allocate bitmap to track nodes that have been visited.  */
  auto_sbitmap visited (last_basic_block_for_fn (cfun));

  /* None of the nodes in the CFG have been visited yet.  */
  bitmap_clear (visited);

  /* Push the first edge on to the stack.  */
  stack.quick_push (ei_start (ENTRY_BLOCK_PTR_FOR_FN (cfun)->succs));

  while (!stack.is_empty ())
    {
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      edge_iterator ei = stack.last ();
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR_FOR_FN (cfun)
	  && ! bitmap_bit_p (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  bitmap_set_bit (visited, dest->index);

	  if (EDGE_COUNT (dest->succs) > 0)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack.quick_push (ei_start (dest->succs));
	  else
	    post_order[post_order_num++] = dest->index;
	}
      else
	{
	  if (ei_one_before_end_p (ei)
	      && src != ENTRY_BLOCK_PTR_FOR_FN (cfun))
	    post_order[post_order_num++] = src->index;

	  if (!ei_one_before_end_p (ei))
	    ei_next (&stack.last ());
	  else
	    stack.pop ();
	}
    }

  if (include_entry_exit)
    {
      post_order[post_order_num++] = ENTRY_BLOCK;
      count = post_order_num;
    }
  else
    count = post_order_num + 2;

  /* Delete the unreachable blocks if some were found and we are
     supposed to do it.  */
  if (delete_unreachable && (count != n_basic_blocks_for_fn (cfun)))
    {
      basic_block b;
      basic_block next_bb;
      for (b = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; b
	   != EXIT_BLOCK_PTR_FOR_FN (cfun); b = next_bb)
	{
	  next_bb = b->next_bb;

	  if (!(bitmap_bit_p (visited, b->index)))
	    delete_basic_block (b);
	}

      tidy_fallthru_edges ();
    }

  return post_order_num;
}

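/* Usage sketch (illustrative): the caller owns the array, which must
   have room for every block (plus ENTRY and EXIT when requested), e.g.

     int *post = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
     int n = post_order_compute (post, false, false);
     ... post[0] .. post[n-1] hold block indexes in post order ...
     free (post);  */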

/* Helper routine for inverted_post_order_compute,
   flow_dfs_compute_reverse_execute, and the reverse-CFG
   depth-first search in dominance.cc.
   BB has to belong to a region of CFG
   unreachable by inverted traversal from the exit.
   i.e. there's no control flow path from ENTRY to EXIT
   that contains this BB.
   This can happen in two cases - if there's an infinite loop
   or if there's a block that has no successor
   (call to a function with no return).
   Some RTL passes deal with this condition by
   calling connect_infinite_loops_to_exit () and/or
   add_noreturn_fake_exit_edges ().
   However, those methods involve modifying the CFG itself
   which may not be desirable.
   Hence, we deal with the infinite loop/no return cases
   by identifying a unique basic block that can reach all blocks
   in such a region by inverted traversal.
   This function returns a basic block that guarantees
   that all blocks in the region are reachable
   by starting an inverted traversal from the returned block.  */

basic_block
dfs_find_deadend (basic_block bb)
{
  auto_bitmap visited;
  basic_block next = bb;

  for (;;)
    {
      if (EDGE_COUNT (next->succs) == 0)
	return next;

      if (! bitmap_set_bit (visited, next->index))
	return bb;

      bb = next;
      /* If we are in an analyzed cycle make sure to try exiting it.
	 Note this is a heuristic only and expected to work when loop
	 fixup is needed as well.  */
      if (! bb->loop_father
	  || ! loop_outer (bb->loop_father))
	next = EDGE_SUCC (bb, 0)->dest;
      else
	{
	  edge_iterator ei;
	  edge e;
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (loop_exit_edge_p (bb->loop_father, e))
	      break;
	  next = e ? e->dest : EDGE_SUCC (bb, 0)->dest;
	}
    }
}


/* Compute the reverse top sort order of the inverted CFG
   i.e. starting from the exit block and following the edges backward
   (from successors to predecessors).
   This ordering can be used for forward dataflow problems among others.

   Optionally if START_POINTS is specified, start from exit block and all
   basic blocks in START_POINTS.  This is used by CD-DCE.

   This function assumes that all blocks in the CFG are reachable
   from the ENTRY (but not necessarily from EXIT).

   If there's an infinite loop,
   a simple inverted traversal starting from the blocks
   with no successors can't visit all blocks.
   To solve this problem, we first do inverted traversal
   starting from the blocks with no successor.
   And if there's any block left that's not visited by the regular
   inverted traversal from EXIT,
   those blocks are in such a problematic region.
   Among those, we find one block that has
   any visited predecessor (which is an entry into such a region),
   start looking for a "dead end" from that block,
   and do another inverted traversal from that dead end.  */

void
inverted_post_order_compute (vec<int> *post_order,
			     sbitmap *start_points)
{
  basic_block bb;
  post_order->reserve_exact (n_basic_blocks_for_fn (cfun));

  if (flag_checking)
    verify_no_unreachable_blocks ();

  /* Allocate stack for back-tracking up CFG.  */
  auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (cfun) + 1);

  /* Allocate bitmap to track nodes that have been visited.  */
  auto_sbitmap visited (last_basic_block_for_fn (cfun));

  /* None of the nodes in the CFG have been visited yet.  */
  bitmap_clear (visited);

  if (start_points)
    {
      FOR_ALL_BB_FN (bb, cfun)
	if (bitmap_bit_p (*start_points, bb->index)
	    && EDGE_COUNT (bb->preds) > 0)
	  {
	    stack.quick_push (ei_start (bb->preds));
	    bitmap_set_bit (visited, bb->index);
	  }
      if (EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds))
	{
	  stack.quick_push (ei_start (EXIT_BLOCK_PTR_FOR_FN (cfun)->preds));
	  bitmap_set_bit (visited, EXIT_BLOCK_PTR_FOR_FN (cfun)->index);
	}
    }
  else
    /* Put all blocks that have no successor into the initial work list.  */
    FOR_ALL_BB_FN (bb, cfun)
      if (EDGE_COUNT (bb->succs) == 0)
	{
	  /* Push the initial edge on to the stack.  */
	  if (EDGE_COUNT (bb->preds) > 0)
	    {
	      stack.quick_push (ei_start (bb->preds));
	      bitmap_set_bit (visited, bb->index);
	    }
	}

  do
    {
      bool has_unvisited_bb = false;

      /* The inverted traversal loop.  */
      while (!stack.is_empty ())
	{
	  edge_iterator ei;
	  basic_block pred;

	  /* Look at the edge on the top of the stack.  */
	  ei = stack.last ();
	  bb = ei_edge (ei)->dest;
	  pred = ei_edge (ei)->src;

	  /* Check if the predecessor has been visited yet.  */
	  if (! bitmap_bit_p (visited, pred->index))
	    {
	      /* Mark that we have visited the destination.  */
	      bitmap_set_bit (visited, pred->index);

	      if (EDGE_COUNT (pred->preds) > 0)
		/* Since the predecessor node has been visited for the first
		   time, check its predecessors.  */
		stack.quick_push (ei_start (pred->preds));
	      else
		post_order->quick_push (pred->index);
	    }
	  else
	    {
	      if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun)
		  && ei_one_before_end_p (ei))
		post_order->quick_push (bb->index);

	      if (!ei_one_before_end_p (ei))
		ei_next (&stack.last ());
	      else
		stack.pop ();
	    }
	}

      /* Detect any infinite loop and activate the kludge.
	 Note that this doesn't check EXIT_BLOCK itself
	 since EXIT_BLOCK is always added after the outer do-while loop.  */
      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		      EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
	if (!bitmap_bit_p (visited, bb->index))
	  {
	    has_unvisited_bb = true;

	    if (EDGE_COUNT (bb->preds) > 0)
	      {
		edge_iterator ei;
		edge e;
		basic_block visited_pred = NULL;

		/* Find an already visited predecessor.  */
		FOR_EACH_EDGE (e, ei, bb->preds)
		  {
		    if (bitmap_bit_p (visited, e->src->index))
		      visited_pred = e->src;
		  }

		if (visited_pred)
		  {
		    basic_block be = dfs_find_deadend (bb);
		    gcc_assert (be != NULL);
		    bitmap_set_bit (visited, be->index);
		    stack.quick_push (ei_start (be->preds));
		    break;
		  }
	      }
	  }

      if (has_unvisited_bb && stack.is_empty ())
	{
	  /* No blocks are reachable from EXIT at all.
	     Find a dead-end from the ENTRY, and restart the iteration.  */
	  basic_block be = dfs_find_deadend (ENTRY_BLOCK_PTR_FOR_FN (cfun));
	  gcc_assert (be != NULL);
	  bitmap_set_bit (visited, be->index);
	  stack.quick_push (ei_start (be->preds));
	}

      /* The only case in which the while condition below holds is
	 when there's an infinite loop.  */
    }
  while (!stack.is_empty ());

  /* EXIT_BLOCK is always included.  */
  post_order->quick_push (EXIT_BLOCK);
}
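
/* Usage sketch (illustrative): callers typically collect the inverted
   post order into a vector, e.g.

     auto_vec<int> postorder;
     inverted_post_order_compute (&postorder, NULL);
     ... iterate postorder from the end to obtain a reverse post order
	 of the inverted CFG, useful for forward dataflow problems ...  */
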
/* Compute the depth first search order of FN and store in the array
   PRE_ORDER if nonzero.  If REV_POST_ORDER is nonzero, return the
   reverse completion number for each node.  Returns the number of nodes
   visited.  A depth first search tries to get as far away from the starting
   point as quickly as possible.

   In case the function has unreachable blocks the number of nodes
   visited does not include them.

   pre_order is really a preorder numbering of the graph.
   rev_post_order is really a reverse postorder numbering of the graph.  */

int
pre_and_rev_post_order_compute_fn (struct function *fn,
				   int *pre_order, int *rev_post_order,
				   bool include_entry_exit)
{
  int pre_order_num = 0;
  int rev_post_order_num = n_basic_blocks_for_fn (fn) - 1;

  /* Allocate stack for back-tracking up CFG.  */
  auto_vec<edge_iterator, 20> stack (n_basic_blocks_for_fn (fn) + 1);

  if (include_entry_exit)
    {
      if (pre_order)
	pre_order[pre_order_num] = ENTRY_BLOCK;
      pre_order_num++;
      if (rev_post_order)
	rev_post_order[rev_post_order_num--] = EXIT_BLOCK;
    }
  else
    rev_post_order_num -= NUM_FIXED_BLOCKS;

  /* BB flag to track nodes that have been visited.  */
  auto_bb_flag visited (fn);

  /* Push the first edge on to the stack.  */
  stack.quick_push (ei_start (ENTRY_BLOCK_PTR_FOR_FN (fn)->succs));

  while (!stack.is_empty ())
    {
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      edge_iterator ei = stack.last ();
      src = ei_edge (ei)->src;
      dest = ei_edge (ei)->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR_FOR_FN (fn)
	  && ! (dest->flags & visited))
	{
	  /* Mark that we have visited the destination.  */
	  dest->flags |= visited;

	  if (pre_order)
	    pre_order[pre_order_num] = dest->index;

	  pre_order_num++;

	  if (EDGE_COUNT (dest->succs) > 0)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack.quick_push (ei_start (dest->succs));
	  else if (rev_post_order)
	    /* There are no successors for the DEST node so assign
	       its reverse completion number.  */
	    rev_post_order[rev_post_order_num--] = dest->index;
	}
      else
	{
	  if (ei_one_before_end_p (ei)
	      && src != ENTRY_BLOCK_PTR_FOR_FN (fn)
	      && rev_post_order)
	    /* There are no more successors for the SRC node
	       so assign its reverse completion number.  */
	    rev_post_order[rev_post_order_num--] = src->index;

	  if (!ei_one_before_end_p (ei))
	    ei_next (&stack.last ());
	  else
	    stack.pop ();
	}
    }

  if (include_entry_exit)
    {
      if (pre_order)
	pre_order[pre_order_num] = EXIT_BLOCK;
      pre_order_num++;
      if (rev_post_order)
	rev_post_order[rev_post_order_num--] = ENTRY_BLOCK;
    }

  /* Clear the temporarily allocated flag.  */
  if (!rev_post_order)
    rev_post_order = pre_order;
  for (int i = 0; i < pre_order_num; ++i)
    BASIC_BLOCK_FOR_FN (fn, rev_post_order[i])->flags &= ~visited;

  return pre_order_num;
}

/* Like pre_and_rev_post_order_compute_fn but operating on the
   current function and asserting that all nodes were visited.  */

int
pre_and_rev_post_order_compute (int *pre_order, int *rev_post_order,
				bool include_entry_exit)
{
  int pre_order_num
    = pre_and_rev_post_order_compute_fn (cfun, pre_order, rev_post_order,
					 include_entry_exit);
  if (include_entry_exit)
    /* The number of nodes visited should be the number of blocks.  */
    gcc_assert (pre_order_num == n_basic_blocks_for_fn (cfun));
  else
    /* The number of nodes visited should be the number of blocks minus
       the entry and exit blocks which are not visited here.  */
    gcc_assert (pre_order_num
		== (n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS));

  return pre_order_num;
}

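/* Usage sketch (illustrative): a reverse post order walk, the most
   common way passes consume this function, might look like

     int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (cfun));
     int n = pre_and_rev_post_order_compute (NULL, rpo, false);
     for (int i = 0; i < n; ++i)
       {
	 basic_block bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
	 ... all of bb's non-back-edge predecessors were seen ...
       }
     free (rpo);  */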


/* Per basic-block data for rev_post_order_and_mark_dfs_back_seme,
   element of a sparsely populated array indexed by basic-block number.  */
typedef auto_vec<int, 2> scc_exit_vec_t;
struct rpoamdbs_bb_data {
    int depth;
    int bb_to_pre;
    /* The basic-block index of the SCC entry of the block visited first
       (the SCC leader).  */
    int scc;
    /* The index into the RPO array where the block's SCC entries end
       (only valid for the SCC leader).  */
    int scc_end;
    /* The indexes of the exit destinations of this SCC (only valid
       for the SCC leader).  Initialized upon discovery of SCC leaders.  */
    scc_exit_vec_t scc_exits;
};

/* Tag H as a header of B, weaving H and its loop header list into the
   current loop header list of B.  */

static void
tag_header (int b, int h, rpoamdbs_bb_data *bb_data)
{
  if (h == -1 || b == h)
    return;
  int cur1 = b;
  int cur2 = h;
  while (bb_data[cur1].scc != -1)
    {
      int ih = bb_data[cur1].scc;
      if (ih == cur2)
	return;
      if (bb_data[ih].depth < bb_data[cur2].depth)
	{
	  bb_data[cur1].scc = cur2;
	  cur1 = cur2;
	  cur2 = ih;
	}
      else
	cur1 = ih;
    }
  bb_data[cur1].scc = cur2;
}

/* Comparator sorting two edge destinations E1 and E2 by their index
   in the PRE array as specified by BB_TO_PRE.  */

static int
cmp_edge_dest_pre (const void *e1_, const void *e2_, void *data_)
{
  const int *e1 = (const int *)e1_;
  const int *e2 = (const int *)e2_;
  rpoamdbs_bb_data *bb_data = (rpoamdbs_bb_data *)data_;
  return (bb_data[*e1].bb_to_pre - bb_data[*e2].bb_to_pre);
}

/* Compute the reverse completion number of a depth first search
   on the SEME region denoted by the ENTRY edge and the EXIT_BBS set of
   exit block indexes and store it in the array REV_POST_ORDER.
   Also sets the EDGE_DFS_BACK edge flags according to this visitation
   order.
   Returns the number of nodes visited.

   In case the function has unreachable blocks the number of nodes
   visited does not include them.

   If FOR_ITERATION is true then compute an RPO where SCCs form a
   contiguous region in the RPO array.
   *TOPLEVEL_SCC_EXTENTS if not NULL is filled with pairs of
   *REV_POST_ORDER indexes denoting extents of the toplevel SCCs in
   this region.  */

int
rev_post_order_and_mark_dfs_back_seme (struct function *fn, edge entry,
				       bitmap exit_bbs, bool for_iteration,
				       int *rev_post_order,
				       vec<std::pair<int, int> >
					 *toplevel_scc_extents)
{
  int rev_post_order_num = 0;

  /* BB flag to track nodes that have been visited.  */
  auto_bb_flag visited (fn);

  /* Lazily initialized per-BB data for the two DFS walks below.  */
  rpoamdbs_bb_data *bb_data
    = XNEWVEC (rpoamdbs_bb_data, last_basic_block_for_fn (fn));

  /* First DFS walk, loop discovery according to
      A New Algorithm for Identifying Loops in Decompilation
     by Tao Wei, Jian Mao, Wei Zou and You Chen of the Institute of
     Computer Science and Technology of the Peking University.  */
  auto_vec<edge_iterator, 20> ei_stack (n_basic_blocks_for_fn (fn) + 1);
  auto_bb_flag is_header (fn);
  int depth = 1;
  unsigned n_sccs = 0;

  basic_block dest = entry->dest;
  edge_iterator ei;
  int pre_num = 0;

  /* DFS process DEST.  */
find_loops:
  bb_data[dest->index].bb_to_pre = pre_num++;
  bb_data[dest->index].depth = depth;
  bb_data[dest->index].scc = -1;
  depth++;
  gcc_assert ((dest->flags & (is_header|visited)) == 0);
  dest->flags |= visited;
  ei = ei_start (dest->succs);
  while (!ei_end_p (ei))
    {
      ei_edge (ei)->flags &= ~EDGE_DFS_BACK;
      if (bitmap_bit_p (exit_bbs, ei_edge (ei)->dest->index))
	;
      else if (!(ei_edge (ei)->dest->flags & visited))
	{
	  ei_stack.quick_push (ei);
	  dest = ei_edge (ei)->dest;
	  /* DFS recurse on DEST.  */
	  goto find_loops;

ret_from_find_loops:
	  /* Return point of DFS recursion.  */
	  ei = ei_stack.pop ();
	  dest = ei_edge (ei)->src;
	  int header = bb_data[ei_edge (ei)->dest->index].scc;
	  tag_header (dest->index, header, bb_data);
	  depth = bb_data[dest->index].depth + 1;
	}
      else
	{
	  if (bb_data[ei_edge (ei)->dest->index].depth > 0) /* on the stack */
	    {
	      ei_edge (ei)->flags |= EDGE_DFS_BACK;
	      n_sccs++;
	      ei_edge (ei)->dest->flags |= is_header;
	      ::new (&bb_data[ei_edge (ei)->dest->index].scc_exits)
		auto_vec<int, 2> ();
	      tag_header (dest->index, ei_edge (ei)->dest->index, bb_data);
	    }
	  else if (bb_data[ei_edge (ei)->dest->index].scc == -1)
	    ;
	  else
	    {
	      int header = bb_data[ei_edge (ei)->dest->index].scc;
	      if (bb_data[header].depth > 0)
		tag_header (dest->index, header, bb_data);
	      else
		{
		  /* A re-entry into an existing loop.  */
		  /* ???  Need to mark is_header?  */
		  while (bb_data[header].scc != -1)
		    {
		      header = bb_data[header].scc;
		      if (bb_data[header].depth > 0)
			{
			  tag_header (dest->index, header, bb_data);
			  break;
			}
		    }
		}
	    }
	}
      ei_next (&ei);
    }
  rev_post_order[rev_post_order_num++] = dest->index;
  /* not on the stack anymore */
  bb_data[dest->index].depth = -bb_data[dest->index].depth;
  if (!ei_stack.is_empty ())
    /* Return from DFS recursion.  */
    goto ret_from_find_loops;

  /* Optimize for no SCCs found or !for_iteration.  */
  if (n_sccs == 0 || !for_iteration)
    {
      /* Clear the temporarily allocated flags.  */
      for (int i = 0; i < rev_post_order_num; ++i)
	BASIC_BLOCK_FOR_FN (fn, rev_post_order[i])->flags
	  &= ~(is_header|visited);
      /* And swap elements.  */
      for (int i = 0; i < rev_post_order_num/2; ++i)
	std::swap (rev_post_order[i], rev_post_order[rev_post_order_num-i-1]);
      XDELETEVEC (bb_data);

      return rev_post_order_num;
    }

  /* Next find SCC exits, clear the visited flag and compute an upper bound
     for the edge stack below.  */
  unsigned edge_count = 0;
  for (int i = 0; i < rev_post_order_num; ++i)
    {
      int bb = rev_post_order[i];
      BASIC_BLOCK_FOR_FN (fn, bb)->flags &= ~visited;
      edge e;
      FOR_EACH_EDGE (e, ei, BASIC_BLOCK_FOR_FN (fn, bb)->succs)
	{
	  if (bitmap_bit_p (exit_bbs, e->dest->index))
	    continue;
	  edge_count++;
	  /* If e is an exit from e->src, record it for
	     bb_data[e->src].scc.  */
	  int src_scc = e->src->index;
	  if (!(e->src->flags & is_header))
	    src_scc = bb_data[src_scc].scc;
	  if (src_scc == -1)
	    continue;
	  int dest_scc = e->dest->index;
	  if (!(e->dest->flags & is_header))
	    dest_scc = bb_data[dest_scc].scc;
	  if (src_scc == dest_scc)
	    continue;
	  /* When dest_scc is nested inside src_scc it's not an
	     exit.  */
	  int tem_dest_scc = dest_scc;
	  unsigned dest_scc_depth = 0;
	  while (tem_dest_scc != -1)
	    {
	      dest_scc_depth++;
	      if ((tem_dest_scc = bb_data[tem_dest_scc].scc) == src_scc)
		break;
	    }
	  if (tem_dest_scc != -1)
	    continue;
	  /* When src_scc is nested inside dest_scc record an
	     exit from the outermost SCC this edge exits.  */
	  int tem_src_scc = src_scc;
	  unsigned src_scc_depth = 0;
	  while (tem_src_scc != -1)
	    {
	      if (bb_data[tem_src_scc].scc == dest_scc)
		{
		  edge_count++;
		  bb_data[tem_src_scc].scc_exits.safe_push (e->dest->index);
		  break;
		}
	      tem_src_scc = bb_data[tem_src_scc].scc;
	      src_scc_depth++;
	    }
	  /* Else find the outermost SCC this edge exits (exits
	     from the inner SCCs are not important for the DFS
	     walk adjustment).  Do so by computing the common
	     ancestor SCC where the immediate child on the path to the
	     source SCC is the exited SCC.  */
	  if (tem_src_scc == -1)
	    {
	      edge_count++;
	      while (src_scc_depth > dest_scc_depth)
		{
		  src_scc = bb_data[src_scc].scc;
		  src_scc_depth--;
		}
	      while (dest_scc_depth > src_scc_depth)
		{
		  dest_scc = bb_data[dest_scc].scc;
		  dest_scc_depth--;
		}
	      while (bb_data[src_scc].scc != bb_data[dest_scc].scc)
		{
		  src_scc = bb_data[src_scc].scc;
		  dest_scc = bb_data[dest_scc].scc;
		}
	      bb_data[src_scc].scc_exits.safe_push (e->dest->index);
	    }
	}
    }

  /* Now the second DFS walk to compute an RPO where the extent of SCCs
     is minimized, thus SCC members are adjacent in the RPO array.
     This is done by performing a DFS walk computing RPO with first visiting
     extra direct edges from SCC entry to its exits.
     That simulates a DFS walk over the graph with SCCs collapsed and
     walking the SCCs themselves only when all outgoing edges from the
     SCCs have been visited.
     SCC_END[scc-header-index] is the position in the RPO array of the
     last member of the SCC.  */
  auto_vec<std::pair<basic_block, basic_block>, 20> estack (edge_count + 1);
  int idx = rev_post_order_num;
  basic_block edest;
  dest = entry->dest;

  /* DFS process DEST.  */
dfs_rpo:
  gcc_checking_assert ((dest->flags & visited) == 0);
  /* Verify we enter SCCs through the same header and SCC nesting appears
     the same.  */
  gcc_assert (bb_data[dest->index].scc == -1
	      || (BASIC_BLOCK_FOR_FN (fn, bb_data[dest->index].scc)->flags
		  & visited));
  dest->flags |= visited;
  bb_data[dest->index].scc_end = -1;
  if ((dest->flags & is_header)
      && !bb_data[dest->index].scc_exits.is_empty ())
    {
      /* Push all the SCC exits as outgoing edges from its header to
	 be visited first.
	 To process exits in the same relative order as in the first
	 DFS walk sort them after their destination PRE order index.  */
      gcc_sort_r (&bb_data[dest->index].scc_exits[0],
		  bb_data[dest->index].scc_exits.length (),
		  sizeof (int), cmp_edge_dest_pre, bb_data);
      /* Process edges in reverse to match previous DFS walk order.  */
      for (int i = bb_data[dest->index].scc_exits.length () - 1; i >= 0; --i)
	estack.quick_push (std::make_pair
	  (dest, BASIC_BLOCK_FOR_FN (fn, bb_data[dest->index].scc_exits[i])));
    }
  else
    {
      if (dest->flags & is_header)
	bb_data[dest->index].scc_end = idx - 1;
      /* Push the edge vector in reverse to match the iteration order
	 from the DFS walk above.  */
      for (int i = EDGE_COUNT (dest->succs) - 1; i >= 0; --i)
	if (!bitmap_bit_p (exit_bbs, EDGE_SUCC (dest, i)->dest->index))
	  estack.quick_push (std::make_pair (dest,
					     EDGE_SUCC (dest, i)->dest));
    }
  while (!estack.is_empty ()
	 && estack.last ().first == dest)
    {
      edest = estack.last ().second;
      if (!(edest->flags & visited))
	{
	  dest = edest;
	  /* DFS recurse on DEST.  */
	  goto dfs_rpo;

ret_from_dfs_rpo:
	  /* Return point of DFS recursion.  */
	  dest = estack.last ().first;
	}
      estack.pop ();
      /* If we processed all SCC exits from DEST mark the SCC end
	 since all RPO entries up to DEST itself will now belong
	 to its SCC.  The special-case of no SCC exits is already
	 dealt with above.  */
      if (dest->flags & is_header
	  /* When the last exit edge was processed mark the SCC end
	     and push the regular edges.  */
	  && bb_data[dest->index].scc_end == -1
	  && (estack.is_empty ()
	      || estack.last ().first != dest))
	{
	  bb_data[dest->index].scc_end = idx - 1;
	  /* Push the edge vector in reverse to match the iteration order
	     from the DFS walk above.  */
	  for (int i = EDGE_COUNT (dest->succs) - 1; i >= 0; --i)
	    if (!bitmap_bit_p (exit_bbs, EDGE_SUCC (dest, i)->dest->index))
	      estack.quick_push (std::make_pair (dest,
						 EDGE_SUCC (dest, i)->dest));
	}
    }
  rev_post_order[--idx] = dest->index;
  if (!estack.is_empty ())
    /* Return from DFS recursion.  */
    goto ret_from_dfs_rpo;

  /* Each SCC extends from the position of its header inside
     the RPO array up to the RPO array index scc_end[header-index].  */
  if (toplevel_scc_extents)
    for (int i = 0; i < rev_post_order_num; i++)
      {
	basic_block bb = BASIC_BLOCK_FOR_FN (fn, rev_post_order[i]);
	if (bb->flags & is_header)
	  {
	    toplevel_scc_extents->safe_push
	      (std::make_pair (i, bb_data[bb->index].scc_end));
	    i = bb_data[bb->index].scc_end;
	  }
      }

  /* Clear the temporarily allocated flags and free memory.  */
  for (int i = 0; i < rev_post_order_num; ++i)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fn, rev_post_order[i]);
      if (bb->flags & is_header)
	bb_data[bb->index].scc_exits.~scc_exit_vec_t ();
      bb->flags &= ~(visited|is_header);
    }

  XDELETEVEC (bb_data);

  return rev_post_order_num;
}



/* Compute the depth first search order on the _reverse_ graph and
   store it in the array DFS_ORDER, marking the nodes visited in VISITED.
   Returns the number of nodes visited.

   The computation is split into four parts:

   the constructor creates the necessary data structures;

   add_bb () adds a basic block to the data structures.  The block
   will start the search;

   execute () continues (or starts) the search using the block on the
   top of the stack, stopping when the stack is empty;

   the destructor releases the data structures.

   Thus, the user will typically construct the object, call add_bb ()
   to add a beginning basic block to the stack, call execute (),
   possibly add another bb to the stack and again call execute (),
   ..., and let the destructor clean up.  */

/* Initialize the data structures used for depth-first search on the
   reverse graph.  */

depth_first_search::depth_first_search () :
  m_stack (n_basic_blocks_for_fn (cfun)),
  m_visited_blocks (last_basic_block_for_fn (cfun))
{
  bitmap_clear (m_visited_blocks);
}

/* Add the specified basic block to the top of the dfs data
   structures.  When the search continues, it will start at the
   block.  */

void
depth_first_search::add_bb (basic_block bb)
{
  m_stack.quick_push (bb);
  bitmap_set_bit (m_visited_blocks, bb->index);
}

/* Continue the depth-first search through the reverse graph starting with the
   block at the stack's top and ending when the stack is empty.  Visited nodes
   are marked.  Returns an unvisited basic block, or NULL if there is none
   available.  */

basic_block
depth_first_search::execute (basic_block last_unvisited)
{
  basic_block bb;
  edge e;
  edge_iterator ei;

  while (!m_stack.is_empty ())
    {
      bb = m_stack.pop ();

      /* Perform depth-first search on adjacent vertices.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
	if (!bitmap_bit_p (m_visited_blocks, e->src->index))
	  add_bb (e->src);
    }

  /* Determine if there are unvisited basic blocks.  */
  FOR_BB_BETWEEN (bb, last_unvisited, NULL, prev_bb)
    if (!bitmap_bit_p (m_visited_blocks, bb->index))
      return bb;

  return NULL;
}

/* Performs dfs search from BB over vertices satisfying PREDICATE;
   if REVERSE, go against direction of edges.  Returns number of blocks
   found and their list in RSLT.  RSLT can contain at most RSLT_MAX items.  */
int
dfs_enumerate_from (basic_block bb, int reverse,
		    bool (*predicate) (const_basic_block, const void *),
		    basic_block *rslt, int rslt_max, const void *data)
{
  basic_block *st, lbb;
  int sp = 0, tv = 0;

  auto_bb_flag visited (cfun);

#define MARK_VISITED(BB) ((BB)->flags |= visited)
#define UNMARK_VISITED(BB) ((BB)->flags &= ~visited)
#define VISITED_P(BB) (((BB)->flags & visited) != 0)

  st = XNEWVEC (basic_block, rslt_max);
  rslt[tv++] = st[sp++] = bb;
  MARK_VISITED (bb);
  while (sp)
    {
      edge e;
      edge_iterator ei;
      lbb = st[--sp];
      if (reverse)
	{
	  FOR_EACH_EDGE (e, ei, lbb->preds)
	    if (!VISITED_P (e->src) && predicate (e->src, data))
	      {
		gcc_assert (tv != rslt_max);
		rslt[tv++] = st[sp++] = e->src;
		MARK_VISITED (e->src);
	      }
	}
      else
	{
	  FOR_EACH_EDGE (e, ei, lbb->succs)
	    if (!VISITED_P (e->dest) && predicate (e->dest, data))
	      {
		gcc_assert (tv != rslt_max);
		rslt[tv++] = st[sp++] = e->dest;
		MARK_VISITED (e->dest);
	      }
	}
    }
  free (st);
  for (sp = 0; sp < tv; sp++)
    UNMARK_VISITED (rslt[sp]);
  return tv;
#undef MARK_VISITED
#undef UNMARK_VISITED
#undef VISITED_P
}

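/* Usage sketch (illustrative): enumerate the blocks reachable from a
   starting block through blocks satisfying a caller-supplied predicate,
   here a hypothetical bb_in_region_p, e.g.

     static bool
     bb_in_region_p (const_basic_block bb, const void *data)
     {
       return ... some property of bb ...;
     }

     basic_block *body = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
     int n = dfs_enumerate_from (start_bb, 0, bb_in_region_p, body,
				 n_basic_blocks_for_fn (cfun), NULL);
     ... body[0] .. body[n-1] hold the enumerated blocks ...
     free (body);  */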
1612 
/* Compute dominance frontiers, a la Harvey, Ferrante, et al.

   This algorithm can be found in Timothy Harvey's PhD thesis, at
   http://www.cs.rice.edu/~harv/dissertation.pdf in the section on iterative
   dominance algorithms.

   First, we identify each join point, j (any node with more than one
   incoming edge is a join point).

   We then examine each predecessor, p, of j and walk up the dominator tree
   starting at p.

   We stop the walk when we reach j's immediate dominator - j is in the
   dominance frontier of each of the nodes in the walk, except for j's
   immediate dominator.  Intuitively, all of the rest of j's dominators are
   shared by j's predecessors as well.
   Since they dominate j, they will not have j in their dominance frontiers.

   The number of nodes touched by this algorithm is equal to the size
   of the dominance frontiers, no more, no less.  */

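/* A small worked example (illustrative, not from the sources): in the
   diamond CFG

	A
       / \
      B   C
       \ /
	D

   D is the only join point, and its immediate dominator is A.  The walk
   up from predecessor B stops at A, recording D in DF(B); the walk from
   C likewise records D in DF(C).  A itself dominates D and so never has
   D in its frontier.  */
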
void
compute_dominance_frontiers (bitmap_head *frontiers)
{
  timevar_push (TV_DOM_FRONTIERS);

  edge p;
  edge_iterator ei;
  basic_block b;
  FOR_EACH_BB_FN (b, cfun)
    {
      if (EDGE_COUNT (b->preds) >= 2)
	{
	  basic_block domsb = get_immediate_dominator (CDI_DOMINATORS, b);
	  FOR_EACH_EDGE (p, ei, b->preds)
	    {
	      basic_block runner = p->src;
	      if (runner == ENTRY_BLOCK_PTR_FOR_FN (cfun))
		continue;

	      while (runner != domsb)
		{
		  if (!bitmap_set_bit (&frontiers[runner->index], b->index))
		    break;
		  runner = get_immediate_dominator (CDI_DOMINATORS, runner);
		}
	    }
	}
    }

  timevar_pop (TV_DOM_FRONTIERS);
}

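/* A sketch of the setup a caller might perform (assuming dominance
   information is already available via calculate_dominance_info):

     basic_block bb;
     bitmap_head *frontiers
       = XNEWVEC (bitmap_head, last_basic_block_for_fn (cfun));
     FOR_ALL_BB_FN (bb, cfun)
       bitmap_initialize (&frontiers[bb->index], &bitmap_default_obstack);
     compute_dominance_frontiers (frontiers);  */
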
/* Given a set of blocks with variable definitions (DEF_BLOCKS),
   return a bitmap with all the blocks in the iterated dominance
   frontier of the blocks in DEF_BLOCKS.  DFS contains dominance
   frontier information as returned by compute_dominance_frontiers.

   The resulting set of blocks are the potential sites where PHI nodes
   are needed.  The caller is responsible for freeing the memory
   allocated for the return value.  */

bitmap
compute_idf (bitmap def_blocks, bitmap_head *dfs)
{
  bitmap_iterator bi;
  unsigned bb_index, i;
  bitmap phi_insertion_points;

  phi_insertion_points = BITMAP_ALLOC (NULL);

  /* Seed the work set with all the blocks in DEF_BLOCKS.  */
  auto_bitmap work_set;
  bitmap_copy (work_set, def_blocks);
  bitmap_tree_view (work_set);

  /* Pop a block off the work set, and add every block that appears in
     the original block's dominance frontier and has not already been
     processed to the work set.  Iterate until the work set is empty.
     Blocks which are added to the work set are potential sites for
     PHI nodes.  */
  while (!bitmap_empty_p (work_set))
    {
      /* The dominance frontier of a block consists of blocks that come
	 after it, so processing earlier blocks first is better.
	 ???  Basic blocks are by no means guaranteed to be ordered in
	 optimal order for this iteration.  */
      bb_index = bitmap_first_set_bit (work_set);
      bitmap_clear_bit (work_set, bb_index);

      /* Since the registration of NEW -> OLD name mappings is done
	 separately from the call to update_ssa, when updating the SSA
	 form, the basic blocks where new and/or old names are defined
	 may have disappeared by CFG cleanup calls.  In this case,
	 we may pull a non-existing block from the work stack.  */
      gcc_checking_assert (bb_index
			   < (unsigned) last_basic_block_for_fn (cfun));

      EXECUTE_IF_AND_COMPL_IN_BITMAP (&dfs[bb_index], phi_insertion_points,
				      0, i, bi)
	{
	  bitmap_set_bit (work_set, i);
	  bitmap_set_bit (phi_insertion_points, i);
	}
    }

  return phi_insertion_points;
}

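/* Illustrative use, in the spirit of SSA PHI placement: DEF_BLOCKS holds
   the blocks defining some name, and the returned IDF is the set of
   blocks that may need a PHI node for it (BB1 and BB2 are hypothetical
   blocks, FRONTIERS the array filled in by compute_dominance_frontiers):

     auto_bitmap def_blocks;
     bitmap_set_bit (def_blocks, bb1->index);
     bitmap_set_bit (def_blocks, bb2->index);
     bitmap idf = compute_idf (def_blocks, frontiers);
     ...
     BITMAP_FREE (idf);  */
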
/* Intersection and union of preds/succs for sbitmap based data flow
   solvers.  All four functions defined below take the same arguments:
   B is the basic block to perform the operation for.  DST is the
   target sbitmap, i.e. the result.  SRC is an sbitmap vector of size
   last_basic_block so that it can be indexed with basic block indices.
   DST may be (but does not have to be) SRC[B->index].  */

/* Set the bitmap DST to the intersection of SRC of successors of
   basic block B.  */

void
bitmap_intersection_of_succs (sbitmap dst, sbitmap *src, basic_block b)
{
  unsigned int set_size = dst->size;
  edge e;
  unsigned ix;

  for (e = NULL, ix = 0; ix < EDGE_COUNT (b->succs); ix++)
    {
      e = EDGE_SUCC (b, ix);
      if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
	continue;

      bitmap_copy (dst, src[e->dest->index]);
      break;
    }

  if (e == 0)
    bitmap_ones (dst);
  else
    for (++ix; ix < EDGE_COUNT (b->succs); ix++)
      {
	unsigned int i;
	SBITMAP_ELT_TYPE *p, *r;

	e = EDGE_SUCC (b, ix);
	if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
	  continue;

	p = src[e->dest->index]->elms;
	r = dst->elms;
	for (i = 0; i < set_size; i++)
	  *r++ &= *p++;
      }
}

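/* For a backward all-paths problem such as very busy expressions, one
   iteration of a worklist solver might use the function above roughly as
   follows (sketch only; BB_IN, BB_OUT, GEN and KILL are assumed to be
   sbitmap vectors indexed by block number and set up by the caller):

     bitmap_intersection_of_succs (BB_OUT[bb->index], BB_IN, bb);
     bitmap_ior_and_compl (BB_IN[bb->index], GEN[bb->index],
			   BB_OUT[bb->index], KILL[bb->index]);  */
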
/* Set the bitmap DST to the intersection of SRC of predecessors of
   basic block B.  */

void
bitmap_intersection_of_preds (sbitmap dst, sbitmap *src, basic_block b)
{
  unsigned int set_size = dst->size;
  edge e;
  unsigned ix;

  for (e = NULL, ix = 0; ix < EDGE_COUNT (b->preds); ix++)
    {
      e = EDGE_PRED (b, ix);
      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	continue;

      bitmap_copy (dst, src[e->src->index]);
      break;
    }

  if (e == 0)
    bitmap_ones (dst);
  else
    for (++ix; ix < EDGE_COUNT (b->preds); ix++)
      {
	unsigned int i;
	SBITMAP_ELT_TYPE *p, *r;

	e = EDGE_PRED (b, ix);
	if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	  continue;

	p = src[e->src->index]->elms;
	r = dst->elms;
	for (i = 0; i < set_size; i++)
	  *r++ &= *p++;
      }
}

/* Set the bitmap DST to the union of SRC of successors of
   basic block B.  */

void
bitmap_union_of_succs (sbitmap dst, sbitmap *src, basic_block b)
{
  unsigned int set_size = dst->size;
  edge e;
  unsigned ix;

  for (ix = 0; ix < EDGE_COUNT (b->succs); ix++)
    {
      e = EDGE_SUCC (b, ix);
      if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
	continue;

      bitmap_copy (dst, src[e->dest->index]);
      break;
    }

  if (ix == EDGE_COUNT (b->succs))
    bitmap_clear (dst);
  else
    for (ix++; ix < EDGE_COUNT (b->succs); ix++)
      {
	unsigned int i;
	SBITMAP_ELT_TYPE *p, *r;

	e = EDGE_SUCC (b, ix);
	if (e->dest == EXIT_BLOCK_PTR_FOR_FN (cfun))
	  continue;

	p = src[e->dest->index]->elms;
	r = dst->elms;
	for (i = 0; i < set_size; i++)
	  *r++ |= *p++;
      }
}

/* Set the bitmap DST to the union of SRC of predecessors of
   basic block B.  */

void
bitmap_union_of_preds (sbitmap dst, sbitmap *src, basic_block b)
{
  unsigned int set_size = dst->size;
  edge e;
  unsigned ix;

  for (ix = 0; ix < EDGE_COUNT (b->preds); ix++)
    {
      e = EDGE_PRED (b, ix);
      if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	continue;

      bitmap_copy (dst, src[e->src->index]);
      break;
    }

  if (ix == EDGE_COUNT (b->preds))
    bitmap_clear (dst);
  else
    for (ix++; ix < EDGE_COUNT (b->preds); ix++)
      {
	unsigned int i;
	SBITMAP_ELT_TYPE *p, *r;

	e = EDGE_PRED (b, ix);
	if (e->src == ENTRY_BLOCK_PTR_FOR_FN (cfun))
	  continue;

	p = src[e->src->index]->elms;
	r = dst->elms;
	for (i = 0; i < set_size; i++)
	  *r++ |= *p++;
      }
}

/* Returns the list of basic blocks in the function in an order that guarantees
   that if a block X has just a single predecessor Y, then Y is after X in the
   ordering.  */

basic_block *
single_pred_before_succ_order (void)
{
  basic_block x, y;
  basic_block *order = XNEWVEC (basic_block, n_basic_blocks_for_fn (cfun));
  unsigned n = n_basic_blocks_for_fn (cfun) - NUM_FIXED_BLOCKS;
  unsigned np, i;
  auto_sbitmap visited (last_basic_block_for_fn (cfun));

#define MARK_VISITED(BB) (bitmap_set_bit (visited, (BB)->index))
#define VISITED_P(BB) (bitmap_bit_p (visited, (BB)->index))

  bitmap_clear (visited);

  MARK_VISITED (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  FOR_EACH_BB_FN (x, cfun)
    {
      if (VISITED_P (x))
	continue;

      /* Walk the predecessors of x as long as they have precisely one
	 predecessor and add them to the list, so that they get stored
	 after x.  */
      for (y = x, np = 1;
	   single_pred_p (y) && !VISITED_P (single_pred (y));
	   y = single_pred (y))
	np++;
      for (y = x, i = n - np;
	   single_pred_p (y) && !VISITED_P (single_pred (y));
	   y = single_pred (y), i++)
	{
	  order[i] = y;
	  MARK_VISITED (y);
	}
      order[i] = y;
      MARK_VISITED (y);

      gcc_assert (i == n - 1);
      n -= np;
    }

  gcc_assert (n == 0);
  return order;

#undef MARK_VISITED
#undef VISITED_P
}

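/* Illustrative property (not from the sources): for a simple chain
   A -> B -> C in which B and C each have a single predecessor, the
   returned array lists C before B before A, so a consumer walking the
   array forward always visits a single-predecessor block before the
   unique block that feeds it.  */
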
/* Ignoring loop backedges, if BB has precisely one incoming edge then
   return that edge.  Otherwise return NULL.

   When IGNORE_NOT_EXECUTABLE is true, also ignore edges that are not marked
   as executable.  */

edge
single_pred_edge_ignoring_loop_edges (basic_block bb,
				      bool ignore_not_executable)
{
  edge retval = NULL;
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      /* A loop back edge can be identified by the destination of
	 the edge dominating the source of the edge.  */
      if (dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
	continue;

      /* We can safely ignore edges that are not executable.  */
      if (ignore_not_executable
	  && (e->flags & EDGE_EXECUTABLE) == 0)
	continue;

      /* If we have already seen a non-loop edge, then we must have
	 multiple incoming non-loop edges and thus we return NULL.  */
      if (retval)
	return NULL;

      /* This is the first non-loop incoming edge we have found.  Record
	 it.  */
      retval = e;
    }

  return retval;
}

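/* A sketch of a typical query (illustrative; inherit_state_from is a
   hypothetical helper): a client such as a jump threader asking whether
   BB has a unique executable incoming edge once loop back edges are
   ignored, and inheriting state from its source if so:

     edge e = single_pred_edge_ignoring_loop_edges (bb, true);
     if (e)
       inherit_state_from (e->src);  */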