xref: /openbsd-src/gnu/usr.bin/gcc/gcc/cfganal.c (revision c87b03e512fc05ed6e0222f6fb0ae86264b1d05b)
1 /* Control flow graph analysis code for GNU compiler.
2    Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3    1999, 2000, 2001, 2003 Free Software Foundation, Inc.
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING.  If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA.  */
21 
22 /* This file contains various simple utilities to analyze the CFG.  */
23 #include "config.h"
24 #include "system.h"
25 #include "rtl.h"
26 #include "hard-reg-set.h"
27 #include "basic-block.h"
28 #include "insn-config.h"
29 #include "recog.h"
30 #include "toplev.h"
31 #include "tm_p.h"
32 
/* Store the data structures necessary for depth-first search.
   Used by flow_dfs_compute_reverse_* to walk the reverse CFG.  */
struct depth_first_search_dsS {
  /* Stack of basic blocks for backtracking during the algorithm.  */
  basic_block *stack;

  /* Number of entries in the stack.  That is, positions 0, ..., sp-1
     are occupied; sp is the next free slot.  */
  unsigned int sp;

  /* Record of basic blocks already seen by depth-first search,
     indexed by block number.  */
  sbitmap visited_blocks;
};
typedef struct depth_first_search_dsS *depth_first_search_ds;
46 
47 static void flow_dfs_compute_reverse_init
48   PARAMS ((depth_first_search_ds));
49 static void flow_dfs_compute_reverse_add_bb
50   PARAMS ((depth_first_search_ds, basic_block));
51 static basic_block flow_dfs_compute_reverse_execute
52   PARAMS ((depth_first_search_ds));
53 static void flow_dfs_compute_reverse_finish
54   PARAMS ((depth_first_search_ds));
55 static void remove_fake_successors	PARAMS ((basic_block));
56 static bool need_fake_edge_p		PARAMS ((rtx));
57 static bool flow_active_insn_p		PARAMS ((rtx));
58 
59 /* Like active_insn_p, except keep the return value clobber around
60    even after reload.  */
61 
62 static bool
flow_active_insn_p(insn)63 flow_active_insn_p (insn)
64      rtx insn;
65 {
66   if (active_insn_p (insn))
67     return true;
68 
69   /* A clobber of the function return value exists for buggy
70      programs that fail to return a value.  Its effect is to
71      keep the return value from being live across the entire
72      function.  If we allow it to be skipped, we introduce the
73      possibility for register livetime aborts.  */
74   if (GET_CODE (PATTERN (insn)) == CLOBBER
75       && GET_CODE (XEXP (PATTERN (insn), 0)) == REG
76       && REG_FUNCTION_VALUE_P (XEXP (PATTERN (insn), 0)))
77     return true;
78 
79   return false;
80 }
81 
82 /* Return true if the block has no effect and only forwards control flow to
83    its single destination.  */
84 
85 bool
forwarder_block_p(bb)86 forwarder_block_p (bb)
87      basic_block bb;
88 {
89   rtx insn;
90 
91   if (bb == EXIT_BLOCK_PTR || bb == ENTRY_BLOCK_PTR
92       || !bb->succ || bb->succ->succ_next)
93     return false;
94 
95   for (insn = bb->head; insn != bb->end; insn = NEXT_INSN (insn))
96     if (INSN_P (insn) && flow_active_insn_p (insn))
97       return false;
98 
99   return (!INSN_P (insn)
100 	  || (GET_CODE (insn) == JUMP_INSN && simplejump_p (insn))
101 	  || !flow_active_insn_p (insn));
102 }
103 
104 /* Return nonzero if we can reach target from src by falling through.  */
105 
106 bool
can_fallthru(src,target)107 can_fallthru (src, target)
108      basic_block src, target;
109 {
110   rtx insn = src->end;
111   rtx insn2 = target->head;
112 
113   if (src->next_bb != target)
114     return 0;
115 
116   if (!active_insn_p (insn2))
117     insn2 = next_active_insn (insn2);
118 
119   /* ??? Later we may add code to move jump tables offline.  */
120   return next_active_insn (insn) == insn2;
121 }
122 
123 /* Mark the back edges in DFS traversal.
124    Return nonzero if a loop (natural or otherwise) is present.
125    Inspired by Depth_First_Search_PP described in:
126 
127      Advanced Compiler Design and Implementation
128      Steven Muchnick
129      Morgan Kaufmann, 1997
130 
131    and heavily borrowed from flow_depth_first_order_compute.  */
132 
bool
mark_dfs_back_edges ()
{
  edge *stack;			/* DFS stack of edges awaiting expansion.  */
  int *pre;			/* pre[i]: preorder (discovery) number of block i; 0 = unseen.  */
  int *post;			/* post[i]: postorder (completion) number; 0 = not yet completed.  */
  int sp;			/* Number of edges currently on STACK.  */
  int prenum = 1;
  int postnum = 1;
  sbitmap visited;		/* Blocks already discovered by the DFS.  */
  bool found = false;		/* Set if any back edge was marked.  */

  /* Allocate the preorder and postorder number arrays.
     xcalloc zeroes them, which the back-edge test below relies on
     (post[dest] == 0 means DEST is still on the current DFS path).  */
  pre = (int *) xcalloc (last_basic_block, sizeof (int));
  post = (int *) xcalloc (last_basic_block, sizeof (int));

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (last_basic_block);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;
      /* Clear any stale back-edge flag; it is recomputed below.  */
      e->flags &= ~EDGE_DFS_BACK;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  SET_BIT (visited, dest->index);

	  pre[dest->index] = prenum++;
	  if (dest->succ)
	    {
	      /* Since the DEST node has been visited for the first
		 time, check its successors.  */
	      stack[sp++] = dest->succ;
	    }
	  else
	    post[dest->index] = postnum++;
	}
      else
	{
	  /* DEST was already visited (or is the exit block).  The edge
	     is a back edge iff DEST is an ancestor still on the DFS
	     path: discovered no later than SRC (pre comparison) and
	     not yet completed (post still zero).  */
	  if (dest != EXIT_BLOCK_PTR && src != ENTRY_BLOCK_PTR
	      && pre[src->index] >= pre[dest->index]
	      && post[dest->index] == 0)
	    e->flags |= EDGE_DFS_BACK, found = true;

	  /* If this was SRC's last outgoing edge, SRC is complete.  */
	  if (! e->succ_next && src != ENTRY_BLOCK_PTR)
	    post[src->index] = postnum++;

	  /* Advance to the next sibling edge, or pop SRC.  */
	  if (e->succ_next)
	    stack[sp - 1] = e->succ_next;
	  else
	    sp--;
	}
    }

  free (pre);
  free (post);
  free (stack);
  sbitmap_free (visited);

  return found;
}
214 
215 /* Set the flag EDGE_CAN_FALLTHRU for edges that can be fallthru.  */
216 
void
set_edge_can_fallthru_flag ()
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      edge e;

      for (e = bb->succ; e; e = e->succ_next)
	{
	  /* Start from a clean slate for every edge.  */
	  e->flags &= ~EDGE_CAN_FALLTHRU;

	  /* The FALLTHRU edge is also CAN_FALLTHRU edge.  */
	  if (e->flags & EDGE_FALLTHRU)
	    e->flags |= EDGE_CAN_FALLTHRU;
	}

      /* If the BB ends with an invertable condjump all (2) edges are
	 CAN_FALLTHRU edges.  Blocks with any other number of
	 successors are skipped.  */
      if (!bb->succ || !bb->succ->succ_next || bb->succ->succ_next->succ_next)
	continue;
      if (!any_condjump_p (bb->end))
	continue;
      /* Probe invertibility by actually inverting the jump; if that
	 succeeds, invert it a second time to restore the original
	 direction (invert_jump has side effects on the insn).  */
      if (!invert_jump (bb->end, JUMP_LABEL (bb->end), 0))
	continue;
      invert_jump (bb->end, JUMP_LABEL (bb->end), 0);
      bb->succ->flags |= EDGE_CAN_FALLTHRU;
      bb->succ->succ_next->flags |= EDGE_CAN_FALLTHRU;
    }
}
248 
249 /* Return true if we need to add fake edge to exit.
250    Helper function for the flow_call_edges_add.  */
251 
252 static bool
need_fake_edge_p(insn)253 need_fake_edge_p (insn)
254      rtx insn;
255 {
256   if (!INSN_P (insn))
257     return false;
258 
259   if ((GET_CODE (insn) == CALL_INSN
260        && !SIBLING_CALL_P (insn)
261        && !find_reg_note (insn, REG_NORETURN, NULL)
262        && !find_reg_note (insn, REG_ALWAYS_RETURN, NULL)
263        && !CONST_OR_PURE_CALL_P (insn)))
264     return true;
265 
266   return ((GET_CODE (PATTERN (insn)) == ASM_OPERANDS
267 	   && MEM_VOLATILE_P (PATTERN (insn)))
268 	  || (GET_CODE (PATTERN (insn)) == PARALLEL
269 	      && asm_noperands (insn) != -1
270 	      && MEM_VOLATILE_P (XVECEXP (PATTERN (insn), 0, 0)))
271 	  || GET_CODE (PATTERN (insn)) == ASM_INPUT);
272 }
273 
274 /* Add fake edges to the function exit for any non constant and non noreturn
275    calls, volatile inline assembly in the bitmap of blocks specified by
276    BLOCKS or to the whole CFG if BLOCKS is zero.  Return the number of blocks
277    that were split.
278 
279    The goal is to expose cases in which entering a basic block does not imply
280    that all subsequent instructions must be executed.  */
281 
int
flow_call_edges_add (blocks)
     sbitmap blocks;		/* Bitmap of blocks to process; NULL = whole CFG.  */
{
  int i;
  int blocks_split = 0;		/* Return value: number of blocks split.  */
  int last_bb = last_basic_block;	/* Snapshot; splitting below may grow it.  */
  bool check_last_block = false;

  if (n_basic_blocks == 0)
    return 0;

  /* Decide whether the block just before EXIT needs the special
     treatment described below.  */
  if (! blocks)
    check_last_block = true;
  else
    check_last_block = TEST_BIT (blocks, EXIT_BLOCK_PTR->prev_bb->index);

  /* In the last basic block, before epilogue generation, there will be
     a fallthru edge to EXIT.  Special care is required if the last insn
     of the last basic block is a call because make_edge folds duplicate
     edges, which would result in the fallthru edge also being marked
     fake, which would result in the fallthru edge being removed by
     remove_fake_edges, which would result in an invalid CFG.

     Moreover, we can't elide the outgoing fake edge, since the block
     profiler needs to take this into account in order to solve the minimal
     spanning tree in the case that the call doesn't return.

     Handle this by adding a dummy instruction in a new last basic block.  */
  if (check_last_block)
    {
      basic_block bb = EXIT_BLOCK_PTR->prev_bb;
      rtx insn = bb->end;

      /* Back up past insns that must be kept in the same block as a call.  */
      while (insn != bb->head
	     && keep_with_call_p (insn))
	insn = PREV_INSN (insn);

      if (need_fake_edge_p (insn))
	{
	  edge e;

	  /* Insert a USE of const0_rtx on the edge to EXIT; committing
	     it forces a new block, separating the call from EXIT.  */
	  for (e = bb->succ; e; e = e->succ_next)
	    if (e->dest == EXIT_BLOCK_PTR)
	      {
		insert_insn_on_edge (gen_rtx_USE (VOIDmode, const0_rtx), e);
		commit_edge_insertions ();
		break;
	      }
	}
    }

  /* Now add fake edges to the function exit for any non constant
     calls since there is no way that we can determine if they will
     return or not...  */

  for (i = 0; i < last_bb; i++)
    {
      basic_block bb = BASIC_BLOCK (i);
      rtx insn;
      rtx prev_insn;

      if (!bb)
	continue;

      if (blocks && !TEST_BIT (blocks, i))
	continue;

      /* Scan the block's insns from last to first.  */
      for (insn = bb->end; ; insn = prev_insn)
	{
	  prev_insn = PREV_INSN (insn);
	  if (need_fake_edge_p (insn))
	    {
	      edge e;
	      rtx split_at_insn = insn;

	      /* Don't split the block between a call and an insn that should
	         remain in the same block as the call.  */
	      if (GET_CODE (insn) == CALL_INSN)
		while (split_at_insn != bb->end
		       && keep_with_call_p (NEXT_INSN (split_at_insn)))
		  split_at_insn = NEXT_INSN (split_at_insn);

	      /* The handling above of the final block before the epilogue
	         should be enough to verify that there is no edge to the exit
		 block in CFG already.  Calling make_edge in such case would
		 cause us to mark that edge as fake and remove it later.  */

#ifdef ENABLE_CHECKING
	      if (split_at_insn == bb->end)
		for (e = bb->succ; e; e = e->succ_next)
		  if (e->dest == EXIT_BLOCK_PTR)
		    abort ();
#endif

	      /* Note that the following may create a new basic block
		 and renumber the existing basic blocks.  */
	      if (split_at_insn != bb->end)
		{
		  e = split_block (bb, split_at_insn);
		  if (e)
		    blocks_split++;
		}

	      make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
	    }

	  if (insn == bb->head)
	    break;
	}
    }

  if (blocks_split)
    verify_flow_info ();

  return blocks_split;
}
400 
401 /* Find unreachable blocks.  An unreachable block will have 0 in
402    the reachable bit in block->flags.  A nonzero value indicates the
403    block is reachable.  */
404 
405 void
find_unreachable_blocks()406 find_unreachable_blocks ()
407 {
408   edge e;
409   basic_block *tos, *worklist, bb;
410 
411   tos = worklist =
412 	(basic_block *) xmalloc (sizeof (basic_block) * n_basic_blocks);
413 
414   /* Clear all the reachability flags.  */
415 
416   FOR_EACH_BB (bb)
417     bb->flags &= ~BB_REACHABLE;
418 
419   /* Add our starting points to the worklist.  Almost always there will
420      be only one.  It isn't inconceivable that we might one day directly
421      support Fortran alternate entry points.  */
422 
423   for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
424     {
425       *tos++ = e->dest;
426 
427       /* Mark the block reachable.  */
428       e->dest->flags |= BB_REACHABLE;
429     }
430 
431   /* Iterate: find everything reachable from what we've already seen.  */
432 
433   while (tos != worklist)
434     {
435       basic_block b = *--tos;
436 
437       for (e = b->succ; e; e = e->succ_next)
438 	if (!(e->dest->flags & BB_REACHABLE))
439 	  {
440 	    *tos++ = e->dest;
441 	    e->dest->flags |= BB_REACHABLE;
442 	  }
443     }
444 
445   free (worklist);
446 }
447 
448 /* Functions to access an edge list with a vector representation.
449    Enough data is kept such that given an index number, the
450    pred and succ that edge represents can be determined, or
451    given a pred and a succ, its index number can be returned.
452    This allows algorithms which consume a lot of memory to
453    represent the normally full matrix of edge (pred,succ) with a
454    single indexed vector,  edge (EDGE_INDEX (pred, succ)), with no
455    wasted space in the client code due to sparse flow graphs.  */
456 
457 /* This functions initializes the edge list. Basically the entire
458    flowgraph is processed, and all edges are assigned a number,
459    and the data structure is filled in.  */
460 
461 struct edge_list *
create_edge_list()462 create_edge_list ()
463 {
464   struct edge_list *elist;
465   edge e;
466   int num_edges;
467   int block_count;
468   basic_block bb;
469 
470   block_count = n_basic_blocks + 2;   /* Include the entry and exit blocks.  */
471 
472   num_edges = 0;
473 
474   /* Determine the number of edges in the flow graph by counting successor
475      edges on each basic block.  */
476   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
477     {
478       for (e = bb->succ; e; e = e->succ_next)
479 	num_edges++;
480     }
481 
482   elist = (struct edge_list *) xmalloc (sizeof (struct edge_list));
483   elist->num_blocks = block_count;
484   elist->num_edges = num_edges;
485   elist->index_to_edge = (edge *) xmalloc (sizeof (edge) * num_edges);
486 
487   num_edges = 0;
488 
489   /* Follow successors of blocks, and register these edges.  */
490   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
491     for (e = bb->succ; e; e = e->succ_next)
492       elist->index_to_edge[num_edges++] = e;
493 
494   return elist;
495 }
496 
497 /* This function free's memory associated with an edge list.  */
498 
499 void
free_edge_list(elist)500 free_edge_list (elist)
501      struct edge_list *elist;
502 {
503   if (elist)
504     {
505       free (elist->index_to_edge);
506       free (elist);
507     }
508 }
509 
510 /* This function provides debug output showing an edge list.  */
511 
512 void
print_edge_list(f,elist)513 print_edge_list (f, elist)
514      FILE *f;
515      struct edge_list *elist;
516 {
517   int x;
518 
519   fprintf (f, "Compressed edge list, %d BBs + entry & exit, and %d edges\n",
520 	   elist->num_blocks - 2, elist->num_edges);
521 
522   for (x = 0; x < elist->num_edges; x++)
523     {
524       fprintf (f, " %-4d - edge(", x);
525       if (INDEX_EDGE_PRED_BB (elist, x) == ENTRY_BLOCK_PTR)
526 	fprintf (f, "entry,");
527       else
528 	fprintf (f, "%d,", INDEX_EDGE_PRED_BB (elist, x)->index);
529 
530       if (INDEX_EDGE_SUCC_BB (elist, x) == EXIT_BLOCK_PTR)
531 	fprintf (f, "exit)\n");
532       else
533 	fprintf (f, "%d)\n", INDEX_EDGE_SUCC_BB (elist, x)->index);
534     }
535 }
536 
537 /* This function provides an internal consistency check of an edge list,
538    verifying that all edges are present, and that there are no
539    extra edges.  */
540 
void
verify_edge_list (f, elist)
     FILE *f;			/* Stream for diagnostic output.  */
     struct edge_list *elist;	/* The edge list to check against the CFG.  */
{
  int pred, succ, index;
  edge e;
  basic_block bb, p, s;

  /* Phase 1: every edge in the CFG must have an index in ELIST, and
     that index must map back to the same pred/succ pair.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      for (e = bb->succ; e; e = e->succ_next)
	{
	  pred = e->src->index;
	  succ = e->dest->index;
	  index = EDGE_INDEX (elist, e->src, e->dest);
	  if (index == EDGE_INDEX_NO_EDGE)
	    {
	      fprintf (f, "*p* No index for edge from %d to %d\n", pred, succ);
	      continue;
	    }

	  if (INDEX_EDGE_PRED_BB (elist, index)->index != pred)
	    fprintf (f, "*p* Pred for index %d should be %d not %d\n",
		     index, pred, INDEX_EDGE_PRED_BB (elist, index)->index);
	  if (INDEX_EDGE_SUCC_BB (elist, index)->index != succ)
	    fprintf (f, "*p* Succ for index %d should be %d not %d\n",
		     index, succ, INDEX_EDGE_SUCC_BB (elist, index)->index);
	}
    }

  /* We've verified that all the edges are in the list, now lets make sure
     there are no spurious edges in the list.  Phase 2: for every
     (pred, succ) block pair, the list has an index iff the CFG has
     the edge.  */

  FOR_BB_BETWEEN (p, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    FOR_BB_BETWEEN (s, ENTRY_BLOCK_PTR->next_bb, NULL, next_bb)
      {
	int found_edge = 0;

	/* Look for the edge in P's successor list...  */
	for (e = p->succ; e; e = e->succ_next)
	  if (e->dest == s)
	    {
	      found_edge = 1;
	      break;
	    }

	/* ...and in S's predecessor list.  */
	for (e = s->pred; e; e = e->pred_next)
	  if (e->src == p)
	    {
	      found_edge = 1;
	      break;
	    }

	if (EDGE_INDEX (elist, p, s)
	    == EDGE_INDEX_NO_EDGE && found_edge != 0)
	  fprintf (f, "*** Edge (%d, %d) appears to not have an index\n",
		   p->index, s->index);
	if (EDGE_INDEX (elist, p, s)
	    != EDGE_INDEX_NO_EDGE && found_edge == 0)
	  fprintf (f, "*** Edge (%d, %d) has index %d, but there is no edge\n",
		   p->index, s->index, EDGE_INDEX (elist, p, s));
      }
}
604 
605 /* This routine will determine what, if any, edge there is between
606    a specified predecessor and successor.  */
607 
608 int
find_edge_index(edge_list,pred,succ)609 find_edge_index (edge_list, pred, succ)
610      struct edge_list *edge_list;
611      basic_block pred, succ;
612 {
613   int x;
614 
615   for (x = 0; x < NUM_EDGES (edge_list); x++)
616     if (INDEX_EDGE_PRED_BB (edge_list, x) == pred
617 	&& INDEX_EDGE_SUCC_BB (edge_list, x) == succ)
618       return x;
619 
620   return (EDGE_INDEX_NO_EDGE);
621 }
622 
623 /* Dump the list of basic blocks in the bitmap NODES.  */
624 
625 void
flow_nodes_print(str,nodes,file)626 flow_nodes_print (str, nodes, file)
627      const char *str;
628      const sbitmap nodes;
629      FILE *file;
630 {
631   int node;
632 
633   if (! nodes)
634     return;
635 
636   fprintf (file, "%s { ", str);
637   EXECUTE_IF_SET_IN_SBITMAP (nodes, 0, node, {fprintf (file, "%d ", node);});
638   fputs ("}\n", file);
639 }
640 
641 /* Dump the list of edges in the array EDGE_LIST.  */
642 
643 void
flow_edge_list_print(str,edge_list,num_edges,file)644 flow_edge_list_print (str, edge_list, num_edges, file)
645      const char *str;
646      const edge *edge_list;
647      int num_edges;
648      FILE *file;
649 {
650   int i;
651 
652   if (! edge_list)
653     return;
654 
655   fprintf (file, "%s { ", str);
656   for (i = 0; i < num_edges; i++)
657     fprintf (file, "%d->%d ", edge_list[i]->src->index,
658 	     edge_list[i]->dest->index);
659 
660   fputs ("}\n", file);
661 }
662 
663 
664 /* This routine will remove any fake successor edges for a basic block.
665    When the edge is removed, it is also removed from whatever predecessor
666    list it is in.  */
667 
668 static void
remove_fake_successors(bb)669 remove_fake_successors (bb)
670      basic_block bb;
671 {
672   edge e;
673 
674   for (e = bb->succ; e;)
675     {
676       edge tmp = e;
677 
678       e = e->succ_next;
679       if ((tmp->flags & EDGE_FAKE) == EDGE_FAKE)
680 	remove_edge (tmp);
681     }
682 }
683 
684 /* This routine will remove all fake edges from the flow graph.  If
685    we remove all fake successors, it will automatically remove all
686    fake predecessors.  */
687 
688 void
remove_fake_edges()689 remove_fake_edges ()
690 {
691   basic_block bb;
692 
693   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
694     remove_fake_successors (bb);
695 }
696 
697 /* This function will add a fake edge between any block which has no
698    successors, and the exit block. Some data flow equations require these
699    edges to exist.  */
700 
701 void
add_noreturn_fake_exit_edges()702 add_noreturn_fake_exit_edges ()
703 {
704   basic_block bb;
705 
706   FOR_EACH_BB (bb)
707     if (bb->succ == NULL)
708       make_single_succ_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
709 }
710 
711 /* This function adds a fake edge between any infinite loops to the
712    exit block.  Some optimizations require a path from each node to
713    the exit node.
714 
715    See also Morgan, Figure 3.10, pp. 82-83.
716 
717    The current implementation is ugly, not attempting to minimize the
718    number of inserted fake edges.  To reduce the number of fake edges
719    to insert, add fake edges from _innermost_ loops containing only
720    nodes not reachable from the exit block.  */
721 
722 void
connect_infinite_loops_to_exit()723 connect_infinite_loops_to_exit ()
724 {
725   basic_block unvisited_block;
726   struct depth_first_search_dsS dfs_ds;
727 
728   /* Perform depth-first search in the reverse graph to find nodes
729      reachable from the exit block.  */
730   flow_dfs_compute_reverse_init (&dfs_ds);
731   flow_dfs_compute_reverse_add_bb (&dfs_ds, EXIT_BLOCK_PTR);
732 
733   /* Repeatedly add fake edges, updating the unreachable nodes.  */
734   while (1)
735     {
736       unvisited_block = flow_dfs_compute_reverse_execute (&dfs_ds);
737       if (!unvisited_block)
738 	break;
739 
740       make_edge (unvisited_block, EXIT_BLOCK_PTR, EDGE_FAKE);
741       flow_dfs_compute_reverse_add_bb (&dfs_ds, unvisited_block);
742     }
743 
744   flow_dfs_compute_reverse_finish (&dfs_ds);
745   return;
746 }
747 
748 /* Compute reverse top sort order */
749 
void
flow_reverse_top_sort_order_compute (rts_order)
     int *rts_order;		/* Out: block indices in postorder (reverse
				   topological order ignoring back edges).  */
{
  edge *stack;			/* DFS stack of edges awaiting expansion.  */
  int sp;			/* Number of edges on STACK.  */
  int postnum = 0;		/* Next slot of RTS_ORDER to fill.  */
  sbitmap visited;		/* Blocks already discovered.  */

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (last_basic_block);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  SET_BIT (visited, dest->index);

	  if (dest->succ)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack[sp++] = dest->succ;
	  else
	    /* A leaf: it completes immediately.  */
	    rts_order[postnum++] = dest->index;
	}
      else
	{
	  /* SRC's last outgoing edge finished; SRC is complete.  */
	  if (! e->succ_next && src != ENTRY_BLOCK_PTR)
	   rts_order[postnum++] = src->index;

	  /* Advance to the next sibling edge, or pop.  */
	  if (e->succ_next)
	    stack[sp - 1] = e->succ_next;
	  else
	    sp--;
	}
    }

  free (stack);
  sbitmap_free (visited);
}
811 
812 /* Compute the depth first search order and store in the array
813   DFS_ORDER if nonzero, marking the nodes visited in VISITED.  If
814   RC_ORDER is nonzero, return the reverse completion number for each
815   node.  Returns the number of nodes visited.  A depth first search
816   tries to get as far away from the starting point as quickly as
817   possible.  */
818 
int
flow_depth_first_order_compute (dfs_order, rc_order)
     int *dfs_order;		/* If nonzero, filled with block indices in
				   DFS discovery (preorder) order.  */
     int *rc_order;		/* If nonzero, filled with block indices in
				   reverse completion order (filled from
				   the back of the array).  */
{
  edge *stack;			/* DFS stack of edges awaiting expansion.  */
  int sp;			/* Number of edges on STACK.  */
  int dfsnum = 0;		/* Count (and preorder slot) of visited nodes.  */
  int rcnum = n_basic_blocks - 1;	/* RC_ORDER is filled back-to-front.  */
  sbitmap visited;		/* Blocks already discovered.  */

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (last_basic_block);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      edge e;
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  SET_BIT (visited, dest->index);

	  if (dfs_order)
	    dfs_order[dfsnum] = dest->index;

	  dfsnum++;

	  if (dest->succ)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack[sp++] = dest->succ;
	  else if (rc_order)
	    /* There are no successors for the DEST node so assign
	       its reverse completion number.  */
	    rc_order[rcnum--] = dest->index;
	}
      else
	{
	  if (! e->succ_next && src != ENTRY_BLOCK_PTR
	      && rc_order)
	    /* There are no more successors for the SRC node
	       so assign its reverse completion number.  */
	    rc_order[rcnum--] = src->index;

	  /* Advance to the next sibling edge, or pop.  */
	  if (e->succ_next)
	    stack[sp - 1] = e->succ_next;
	  else
	    sp--;
	}
    }

  free (stack);
  sbitmap_free (visited);

  /* The number of nodes visited should not be greater than
     n_basic_blocks.  */
  if (dfsnum > n_basic_blocks)
    abort ();

  /* There are some nodes left in the CFG that are unreachable.  */
  if (dfsnum < n_basic_blocks)
    abort ();

  return dfsnum;
}
903 
/* Node of the depth-first spanning tree built by
   flow_preorder_transversal_compute, one per basic block.  */
struct dfst_node
{
    /* Number of children currently stored in NODE.  */
    unsigned nnodes;
    /* Array of pointers to this node's children in the spanning tree.  */
    struct dfst_node **node;
    /* Parent in the spanning tree; NULL at the root.  */
    struct dfst_node *up;
};
910 
911 /* Compute a preorder transversal ordering such that a sub-tree which
912    is the source of a cross edge appears before the sub-tree which is
913    the destination of the cross edge.  This allows for easy detection
914    of all the entry blocks for a loop.
915 
916    The ordering is compute by:
917 
918      1) Generating a depth first spanning tree.
919 
920      2) Walking the resulting tree from right to left.  */
921 
void
flow_preorder_transversal_compute (pot_order)
     int *pot_order;		/* Out: block indices in the computed order.  */
{
  edge e;
  edge *stack;			/* DFS stack of edges awaiting expansion.  */
  int i;
  int max_successors;		/* Successor count of the current block.  */
  int sp;			/* Number of edges on STACK.  */
  sbitmap visited;		/* Blocks already discovered.  */
  struct dfst_node *node;
  struct dfst_node *dfst;	/* Spanning tree, indexed by block index.  */
  basic_block bb;

  /* Allocate stack for back-tracking up CFG.  */
  stack = (edge *) xmalloc ((n_basic_blocks + 1) * sizeof (edge));
  sp = 0;

  /* Allocate the tree.  xcalloc zeroes it, so nnodes/node/up all
     start out empty.  */
  dfst = (struct dfst_node *) xcalloc (last_basic_block,
				       sizeof (struct dfst_node));

  /* Size each node's child array to the block's successor count.  */
  FOR_EACH_BB (bb)
    {
      max_successors = 0;
      for (e = bb->succ; e; e = e->succ_next)
	max_successors++;

      dfst[bb->index].node
	= (max_successors
	   ? (struct dfst_node **) xcalloc (max_successors,
					    sizeof (struct dfst_node *))
	   : NULL);
    }

  /* Allocate bitmap to track nodes that have been visited.  */
  visited = sbitmap_alloc (last_basic_block);

  /* None of the nodes in the CFG have been visited yet.  */
  sbitmap_zero (visited);

  /* Push the first edge on to the stack.  */
  stack[sp++] = ENTRY_BLOCK_PTR->succ;

  while (sp)
    {
      basic_block src;
      basic_block dest;

      /* Look at the edge on the top of the stack.  */
      e = stack[sp - 1];
      src = e->src;
      dest = e->dest;

      /* Check if the edge destination has been visited yet.  */
      if (dest != EXIT_BLOCK_PTR && ! TEST_BIT (visited, dest->index))
	{
	  /* Mark that we have visited the destination.  */
	  SET_BIT (visited, dest->index);

	  /* Add the destination to the preorder tree.  */
	  if (src != ENTRY_BLOCK_PTR)
	    {
	      dfst[src->index].node[dfst[src->index].nnodes++]
		= &dfst[dest->index];
	      dfst[dest->index].up = &dfst[src->index];
	    }

	  if (dest->succ)
	    /* Since the DEST node has been visited for the first
	       time, check its successors.  */
	    stack[sp++] = dest->succ;
	}

      else if (e->succ_next)
	stack[sp - 1] = e->succ_next;
      else
	sp--;
    }

  free (stack);
  sbitmap_free (visited);

  /* Record the preorder transversal order by
     walking the tree from right to left.  */

  i = 0;
  node = &dfst[ENTRY_BLOCK_PTR->next_bb->index];
  /* NOTE(review): records literal 0 for the root block — assumes the
     first real block has index 0; confirm against block renumbering.  */
  pot_order[i++] = 0;

  /* Always descend into the rightmost unconsumed child first;
     nnodes doubles as a cursor and is destroyed by this walk.  */
  while (node)
    {
      if (node->nnodes)
	{
	  node = node->node[--node->nnodes];
	  pot_order[i++] = node - dfst;
	}
      else
	node = node->up;
    }

  /* Free the tree.  */

  for (i = 0; i < last_basic_block; i++)
    if (dfst[i].node)
      free (dfst[i].node);

  free (dfst);
}
1031 
1032 /* Compute the depth first search order on the _reverse_ graph and
1033    store in the array DFS_ORDER, marking the nodes visited in VISITED.
1034    Returns the number of nodes visited.
1035 
1036    The computation is split into three pieces:
1037 
1038    flow_dfs_compute_reverse_init () creates the necessary data
1039    structures.
1040 
1041    flow_dfs_compute_reverse_add_bb () adds a basic block to the data
1042    structures.  The block will start the search.
1043 
1044    flow_dfs_compute_reverse_execute () continues (or starts) the
1045    search using the block on the top of the stack, stopping when the
1046    stack is empty.
1047 
1048    flow_dfs_compute_reverse_finish () destroys the necessary data
1049    structures.
1050 
1051    Thus, the user will probably call ..._init(), call ..._add_bb() to
1052    add a beginning basic block to the stack, call ..._execute(),
1053    possibly add another bb to the stack and again call ..._execute(),
1054    ..., and finally call _finish().  */
1055 
1056 /* Initialize the data structures used for depth-first search on the
1057    reverse graph.  If INITIALIZE_STACK is nonzero, the exit block is
1058    added to the basic block stack.  DATA is the current depth-first
1059    search context.  If INITIALIZE_STACK is nonzero, there is an
1060    element on the stack.  */
1061 
1062 static void
flow_dfs_compute_reverse_init(data)1063 flow_dfs_compute_reverse_init (data)
1064      depth_first_search_ds data;
1065 {
1066   /* Allocate stack for back-tracking up CFG.  */
1067   data->stack = (basic_block *) xmalloc ((n_basic_blocks - (INVALID_BLOCK + 1))
1068 					 * sizeof (basic_block));
1069   data->sp = 0;
1070 
1071   /* Allocate bitmap to track nodes that have been visited.  */
1072   data->visited_blocks = sbitmap_alloc (last_basic_block - (INVALID_BLOCK + 1));
1073 
1074   /* None of the nodes in the CFG have been visited yet.  */
1075   sbitmap_zero (data->visited_blocks);
1076 
1077   return;
1078 }
1079 
1080 /* Add the specified basic block to the top of the dfs data
1081    structures.  When the search continues, it will start at the
1082    block.  */
1083 
1084 static void
flow_dfs_compute_reverse_add_bb(data,bb)1085 flow_dfs_compute_reverse_add_bb (data, bb)
1086      depth_first_search_ds data;
1087      basic_block bb;
1088 {
1089   data->stack[data->sp++] = bb;
1090   SET_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1));
1091 }
1092 
1093 /* Continue the depth-first search through the reverse graph starting with the
1094    block at the stack's top and ending when the stack is empty.  Visited nodes
1095    are marked.  Returns an unvisited basic block, or NULL if there is none
1096    available.  */
1097 
1098 static basic_block
flow_dfs_compute_reverse_execute(data)1099 flow_dfs_compute_reverse_execute (data)
1100      depth_first_search_ds data;
1101 {
1102   basic_block bb;
1103   edge e;
1104 
1105   while (data->sp > 0)
1106     {
1107       bb = data->stack[--data->sp];
1108 
1109       /* Perform depth-first search on adjacent vertices.  */
1110       for (e = bb->pred; e; e = e->pred_next)
1111 	if (!TEST_BIT (data->visited_blocks,
1112 		       e->src->index - (INVALID_BLOCK + 1)))
1113 	  flow_dfs_compute_reverse_add_bb (data, e->src);
1114     }
1115 
1116   /* Determine if there are unvisited basic blocks.  */
1117   FOR_BB_BETWEEN (bb, EXIT_BLOCK_PTR, NULL, prev_bb)
1118     if (!TEST_BIT (data->visited_blocks, bb->index - (INVALID_BLOCK + 1)))
1119       return bb;
1120 
1121   return NULL;
1122 }
1123 
1124 /* Destroy the data structures needed for depth-first search on the
1125    reverse graph.  */
1126 
1127 static void
flow_dfs_compute_reverse_finish(data)1128 flow_dfs_compute_reverse_finish (data)
1129      depth_first_search_ds data;
1130 {
1131   free (data->stack);
1132   sbitmap_free (data->visited_blocks);
1133 }
1134 
1135 /* Performs dfs search from BB over vertices satisfying PREDICATE;
1136    if REVERSE, go against direction of edges.  Returns number of blocks
1137    found and their list in RSLT.  RSLT can contain at most RSLT_MAX items.  */
1138 int
dfs_enumerate_from(bb,reverse,predicate,rslt,rslt_max,data)1139 dfs_enumerate_from (bb, reverse, predicate, rslt, rslt_max, data)
1140      basic_block bb;
1141      int reverse;
1142      bool (*predicate) PARAMS ((basic_block, void *));
1143      basic_block *rslt;
1144      int rslt_max;
1145      void *data;
1146 {
1147   basic_block *st, lbb;
1148   int sp = 0, tv = 0;
1149 
1150   st = xcalloc (rslt_max, sizeof (basic_block));
1151   rslt[tv++] = st[sp++] = bb;
1152   bb->flags |= BB_VISITED;
1153   while (sp)
1154     {
1155       edge e;
1156       lbb = st[--sp];
1157       if (reverse)
1158         {
1159           for (e = lbb->pred; e; e = e->pred_next)
1160 	    if (!(e->src->flags & BB_VISITED) && predicate (e->src, data))
1161 	      {
1162 	        if (tv == rslt_max)
1163 	          abort ();
1164 	        rslt[tv++] = st[sp++] = e->src;
1165 	        e->src->flags |= BB_VISITED;
1166 	      }
1167         }
1168       else
1169         {
1170           for (e = lbb->succ; e; e = e->succ_next)
1171 	    if (!(e->dest->flags & BB_VISITED) && predicate (e->dest, data))
1172 	      {
1173 	        if (tv == rslt_max)
1174 	          abort ();
1175 	        rslt[tv++] = st[sp++] = e->dest;
1176 	        e->dest->flags |= BB_VISITED;
1177 	      }
1178 	}
1179     }
1180   free (st);
1181   for (sp = 0; sp < tv; sp++)
1182     rslt[sp]->flags &= ~BB_VISITED;
1183   return tv;
1184 }
1185