/* CFG cleanup for trees.
   Copyright (C) 2001-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "basic-block.h"
#include "diagnostic-core.h"
#include "flags.h"
#include "function.h"
#include "ggc.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "except.h"
#include "cfgloop.h"
#include "hashtab.h"
#include "tree-ssa-propagate.h"
#include "tree-scalar-evolution.h"

/* The set of blocks in which at least one of the following changes happened:
   -- the statement at the end of the block was changed
   -- the block was newly created
   -- the set of the predecessors of the block changed
   -- the set of the successors of the block changed
   ??? Maybe we could track these changes separately, since they determine
       what cleanups it makes sense to try on the block.  */
bitmap cfgcleanup_altered_bbs;
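/* (As used below: the bitmap is allocated in cleanup_tree_cfg_1,
   drained there as a worklist of blocks to revisit, and freed once
   the cleanup iteration stabilizes.)  */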

/* Remove any fallthru edge from EV.  Return true if an edge was removed.  */

static bool
remove_fallthru_edge (vec<edge, va_gc> *ev)
{
  edge_iterator ei;
  edge e;

  FOR_EACH_EDGE (e, ei, ev)
    if ((e->flags & EDGE_FALLTHRU) != 0)
      {
        remove_edge_and_dominated_blocks (e);
        return true;
      }
  return false;
}


/* Disconnect an unreachable block in the control expression starting
   at block BB.  */
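/* For example (an illustrative sketch): if BB ends in
       if (0 != 0) goto <L1>; else goto <L2>;
   the condition folds to false, find_taken_edge returns the edge to
   <L2>, and the edge to <L1> (together with any blocks dominated only
   by it) is removed.  */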

static bool
cleanup_control_expr_graph (basic_block bb, gimple_stmt_iterator gsi)
{
  edge taken_edge;
  bool retval = false;
  gimple stmt = gsi_stmt (gsi);
  tree val;

  if (!single_succ_p (bb))
    {
      edge e;
      edge_iterator ei;
      bool warned;
      location_t loc;

      fold_defer_overflow_warnings ();
      loc = gimple_location (stmt);
      switch (gimple_code (stmt))
        {
        case GIMPLE_COND:
          val = fold_binary_loc (loc, gimple_cond_code (stmt),
                                 boolean_type_node,
                                 gimple_cond_lhs (stmt),
                                 gimple_cond_rhs (stmt));
          break;

        case GIMPLE_SWITCH:
          val = gimple_switch_index (stmt);
          break;

        default:
          val = NULL_TREE;
        }
      taken_edge = find_taken_edge (bb, val);
      if (!taken_edge)
        {
          fold_undefer_and_ignore_overflow_warnings ();
          return false;
        }

      /* Remove all the edges except the one that is always executed.  */
      warned = false;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          if (e != taken_edge)
            {
              if (!warned)
                {
                  fold_undefer_overflow_warnings
                    (true, stmt, WARN_STRICT_OVERFLOW_CONDITIONAL);
                  warned = true;
                }

              taken_edge->probability += e->probability;
              taken_edge->count += e->count;
              remove_edge_and_dominated_blocks (e);
              retval = true;
            }
          else
            ei_next (&ei);
        }
      if (!warned)
        fold_undefer_and_ignore_overflow_warnings ();
      if (taken_edge->probability > REG_BR_PROB_BASE)
        taken_edge->probability = REG_BR_PROB_BASE;
    }
  else
    taken_edge = single_succ_edge (bb);

  bitmap_set_bit (cfgcleanup_altered_bbs, bb->index);
  gsi_remove (&gsi, true);
  taken_edge->flags = EDGE_FALLTHRU;

  return retval;
}

/* Try to remove superfluous control structures in basic block BB.  Returns
   true if anything changes.  */

static bool
cleanup_control_flow_bb (basic_block bb)
{
  gimple_stmt_iterator gsi;
  bool retval = false;
  gimple stmt;

  /* If the last statement of the block could throw and now cannot,
     we need to prune the CFG.  */
  retval |= gimple_purge_dead_eh_edges (bb);

  gsi = gsi_last_bb (bb);
  if (gsi_end_p (gsi))
    return retval;

  stmt = gsi_stmt (gsi);

  if (gimple_code (stmt) == GIMPLE_COND
      || gimple_code (stmt) == GIMPLE_SWITCH)
    retval |= cleanup_control_expr_graph (bb, gsi);
  else if (gimple_code (stmt) == GIMPLE_GOTO
           && TREE_CODE (gimple_goto_dest (stmt)) == ADDR_EXPR
           && (TREE_CODE (TREE_OPERAND (gimple_goto_dest (stmt), 0))
               == LABEL_DECL))
    {
      /* If we had a computed goto which has a compile-time determinable
         destination, then we can eliminate the goto.  */
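      /* An illustrative (hypothetical) example: after earlier
         optimizations, a computed goto may look like
             goto *(&&lab);
         where the destination is the ADDR_EXPR of the LABEL_DECL
         "lab", so control provably transfers to lab's block.  */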
      edge e;
      tree label;
      edge_iterator ei;
      basic_block target_block;

      /* First look at all the outgoing edges.  Delete any outgoing
         edges which do not go to the right block.  For the one
         edge which goes to the right block, fix up its flags.  */
      label = TREE_OPERAND (gimple_goto_dest (stmt), 0);
      target_block = label_to_block (label);
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          if (e->dest != target_block)
            remove_edge_and_dominated_blocks (e);
          else
            {
              /* Turn off the EDGE_ABNORMAL flag.  */
              e->flags &= ~EDGE_ABNORMAL;

              /* And set EDGE_FALLTHRU.  */
              e->flags |= EDGE_FALLTHRU;
              ei_next (&ei);
            }
        }

      bitmap_set_bit (cfgcleanup_altered_bbs, bb->index);
      bitmap_set_bit (cfgcleanup_altered_bbs, target_block->index);

      /* Remove the GOTO_EXPR as it is not needed.  The CFG has all the
         relevant information we need.  */
      gsi_remove (&gsi, true);
      retval = true;
    }

  /* Check for indirect calls that have been turned into
     noreturn calls.  */
  else if (is_gimple_call (stmt)
           && gimple_call_noreturn_p (stmt)
           && remove_fallthru_edge (bb->succs))
    retval = true;

  return retval;
}

/* Return true if basic block BB does nothing except pass control
   flow to another block, and we can safely insert a label at the
   start of the successor block.

   As a precondition, we require that BB not be equal to
   ENTRY_BLOCK_PTR.  */
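
/* An illustrative sketch of a forwarder block (PHI_WANTED false):

     <bb 4>:
     <L3>:
       (no other statements; single fallthru edge to <bb 7>)

   The block contains at most labels and debug statements and passes
   control unconditionally to its lone successor.  */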

static bool
tree_forwarder_block_p (basic_block bb, bool phi_wanted)
{
  gimple_stmt_iterator gsi;
  location_t locus;

  /* BB must have a single outgoing edge.  */
  if (!single_succ_p (bb)
      /* If PHI_WANTED is false, BB must not have any PHI nodes.
         Otherwise, BB must have PHI nodes.  */
      || gimple_seq_empty_p (phi_nodes (bb)) == phi_wanted
      /* BB may not be a predecessor of EXIT_BLOCK_PTR.  */
      || single_succ (bb) == EXIT_BLOCK_PTR
      /* Nor should this be an infinite loop.  */
      || single_succ (bb) == bb
      /* BB may not have an abnormal outgoing edge.  */
      || (single_succ_edge (bb)->flags & EDGE_ABNORMAL))
    return false;

  gcc_checking_assert (bb != ENTRY_BLOCK_PTR);

  locus = single_succ_edge (bb)->goto_locus;

  /* There should not be an edge coming from entry, or an EH edge.  */
  {
    edge_iterator ei;
    edge e;

    FOR_EACH_EDGE (e, ei, bb->preds)
      if (e->src == ENTRY_BLOCK_PTR || (e->flags & EDGE_EH))
        return false;
      /* If goto_locus of any of the edges differs, prevent removing
         the forwarder block for -O0.  */
      else if (optimize == 0 && e->goto_locus != locus)
        return false;
  }

  /* Now walk through the statements backward.  We can ignore labels,
     anything else means this is not a forwarder block.  */
  for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      switch (gimple_code (stmt))
        {
        case GIMPLE_LABEL:
          if (DECL_NONLOCAL (gimple_label_label (stmt)))
            return false;
          if (optimize == 0 && gimple_location (stmt) != locus)
            return false;
          break;

          /* ??? For now, hope there's a corresponding debug
             assignment at the destination.  */
        case GIMPLE_DEBUG:
          break;

        default:
          return false;
        }
    }

  if (current_loops)
    {
      basic_block dest;
      /* Protect loop latches, headers and preheaders.  */
      if (bb->loop_father->header == bb)
        return false;
      dest = EDGE_SUCC (bb, 0)->dest;

      if (dest->loop_father->header == dest)
        return false;
    }
  return true;
}

/* If all the PHI nodes in DEST have alternatives for E1 and E2 and
   those alternatives are equal in each of the PHI nodes, then return
   true, else return false.  */
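/* For instance (an illustrative case), given
     x_1 = PHI <a_2(E1), a_2(E2), b_3(E3)>
   the alternatives for E1 and E2 are both a_2 and therefore equal.  */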

static bool
phi_alternatives_equal (basic_block dest, edge e1, edge e2)
{
  int n1 = e1->dest_idx;
  int n2 = e2->dest_idx;
  gimple_stmt_iterator gsi;

  for (gsi = gsi_start_phis (dest); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple phi = gsi_stmt (gsi);
      tree val1 = gimple_phi_arg_def (phi, n1);
      tree val2 = gimple_phi_arg_def (phi, n2);

      gcc_assert (val1 != NULL_TREE);
      gcc_assert (val2 != NULL_TREE);

      if (!operand_equal_for_phi_arg_p (val1, val2))
        return false;
    }

  return true;
}

/* Removes forwarder block BB.  Returns false if this failed.  */
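/* (Sketch of the effect: every predecessor edge of BB is redirected
   to BB's single successor DEST, labels and, when safe, debug
   statements are moved into DEST, the dominator tree is updated, and
   BB itself is deleted.)  */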

static bool
remove_forwarder_block (basic_block bb)
{
  edge succ = single_succ_edge (bb), e, s;
  basic_block dest = succ->dest;
  gimple label;
  edge_iterator ei;
  gimple_stmt_iterator gsi, gsi_to;
  bool can_move_debug_stmts;

  /* We check for infinite loops already in tree_forwarder_block_p.
     However it may happen that the infinite loop is created
     afterwards due to removal of forwarders.  */
  if (dest == bb)
    return false;

  /* If the destination block consists of a nonlocal label or is an
     EH landing pad, do not merge it.  */
  label = first_stmt (dest);
  if (label
      && gimple_code (label) == GIMPLE_LABEL
      && (DECL_NONLOCAL (gimple_label_label (label))
          || EH_LANDING_PAD_NR (gimple_label_label (label)) != 0))
    return false;

  /* If there is an abnormal edge to basic block BB, but not into
     DEST, problems might occur during removal of the phi node at
     out-of-SSA time due to overlapping live ranges of registers.

     If there is an abnormal edge in DEST, the problems would occur
     anyway since cleanup_dead_labels would then merge the labels for
     two different EH regions, and the rest of the exception handling
     code does not like it.

     So if there is an abnormal edge to BB, proceed only if there is
     no abnormal edge to DEST and there are no phi nodes in DEST.  */
  if (bb_has_abnormal_pred (bb)
      && (bb_has_abnormal_pred (dest)
          || !gimple_seq_empty_p (phi_nodes (dest))))
    return false;

  /* If there are phi nodes in DEST, and some of the blocks that are
     predecessors of BB are also predecessors of DEST, check that the
     phi node arguments match.  */
  if (!gimple_seq_empty_p (phi_nodes (dest)))
    {
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          s = find_edge (e->src, dest);
          if (!s)
            continue;

          if (!phi_alternatives_equal (dest, succ, s))
            return false;
        }
    }

  can_move_debug_stmts = MAY_HAVE_DEBUG_STMTS && single_pred_p (dest);

  /* Redirect the edges.  */
  for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
    {
      bitmap_set_bit (cfgcleanup_altered_bbs, e->src->index);

      if (e->flags & EDGE_ABNORMAL)
        {
          /* If there is an abnormal edge, redirect it anyway, and
             move the labels to the new block to make it legal.  */
          s = redirect_edge_succ_nodup (e, dest);
        }
      else
        s = redirect_edge_and_branch (e, dest);

      if (s == e)
        {
          /* Create arguments for the phi nodes, since the edge was not
             here before.  */
          for (gsi = gsi_start_phis (dest);
               !gsi_end_p (gsi);
               gsi_next (&gsi))
            {
              gimple phi = gsi_stmt (gsi);
              source_location l = gimple_phi_arg_location_from_edge (phi, succ);
              tree def = gimple_phi_arg_def (phi, succ->dest_idx);
              add_phi_arg (phi, unshare_expr (def), s, l);
            }
        }
    }

  /* Move nonlocal labels and computed goto targets as well as user
     defined labels and labels with an EH landing pad number to the
     new block, so that the redirection of the abnormal edges works,
     jump targets end up in a sane place and debug information for
     labels is retained.  */
  gsi_to = gsi_start_bb (dest);
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
    {
      tree decl;
      label = gsi_stmt (gsi);
      if (is_gimple_debug (label))
        break;
      decl = gimple_label_label (label);
      if (EH_LANDING_PAD_NR (decl) != 0
          || DECL_NONLOCAL (decl)
          || FORCED_LABEL (decl)
          || !DECL_ARTIFICIAL (decl))
        {
          gsi_remove (&gsi, false);
          gsi_insert_before (&gsi_to, label, GSI_SAME_STMT);
        }
      else
        gsi_next (&gsi);
    }

  /* Move debug statements if the destination has a single predecessor.  */
  if (can_move_debug_stmts)
    {
      gsi_to = gsi_after_labels (dest);
      for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); )
        {
          gimple debug = gsi_stmt (gsi);
          if (!is_gimple_debug (debug))
            break;
          gsi_remove (&gsi, false);
          gsi_insert_before (&gsi_to, debug, GSI_SAME_STMT);
        }
    }

  bitmap_set_bit (cfgcleanup_altered_bbs, dest->index);

  /* Update the dominators.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    {
      basic_block dom, dombb, domdest;

      dombb = get_immediate_dominator (CDI_DOMINATORS, bb);
      domdest = get_immediate_dominator (CDI_DOMINATORS, dest);
      if (domdest == bb)
        {
          /* Shortcut to avoid calling (relatively expensive)
             nearest_common_dominator unless necessary.  */
          dom = dombb;
        }
      else
        dom = nearest_common_dominator (CDI_DOMINATORS, domdest, dombb);

      set_immediate_dominator (CDI_DOMINATORS, dest, dom);
    }

  /* And kill the forwarder block.  */
  delete_basic_block (bb);

  return true;
}

/* STMT is a call that has been discovered noreturn.  Fix up the CFG
   and remove the LHS.  Return true if something changed.  */
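/* (Illustrative case: if "x_1 = foo ();" sits in the middle of a block
   and foo is now known to be noreturn, the block is split after the
   call, the fallthru edge is removed, and the dead LHS x_1 is
   released.)  */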

bool
fixup_noreturn_call (gimple stmt)
{
  basic_block bb = gimple_bb (stmt);
  bool changed = false;

  if (gimple_call_builtin_p (stmt, BUILT_IN_RETURN))
    return false;

  /* First split basic block if stmt is not last.  */
  if (stmt != gsi_stmt (gsi_last_bb (bb)))
    {
      if (stmt == gsi_stmt (gsi_last_nondebug_bb (bb)))
        {
          /* Don't split if there are only debug stmts
             after stmt; that can result in -fcompare-debug
             failures.  Remove the debug stmts instead,
             they should all be unreachable anyway.  */
          gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
          for (gsi_next (&gsi); !gsi_end_p (gsi); )
            gsi_remove (&gsi, true);
        }
      else
        split_block (bb, stmt);
    }

  changed |= remove_fallthru_edge (bb->succs);

  /* If there is an LHS, remove it.  */
  if (gimple_call_lhs (stmt))
    {
      tree op = gimple_call_lhs (stmt);
      gimple_call_set_lhs (stmt, NULL_TREE);

      /* We need to remove the SSA name to avoid checking errors.
         All uses are dominated by the noreturn call and thus will
         be removed afterwards.
         We proactively remove affected non-PHI statements to keep
         fixup_cfg from trying to update them and crashing.  */
      if (TREE_CODE (op) == SSA_NAME)
        {
          use_operand_p use_p;
          imm_use_iterator iter;
          gimple use_stmt;
          bitmap_iterator bi;
          unsigned int bb_index;

          bitmap blocks = BITMAP_ALLOC (NULL);

          FOR_EACH_IMM_USE_STMT (use_stmt, iter, op)
            {
              if (gimple_code (use_stmt) != GIMPLE_PHI)
                bitmap_set_bit (blocks, gimple_bb (use_stmt)->index);
              else
                FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
                  SET_USE (use_p, error_mark_node);
            }
          EXECUTE_IF_SET_IN_BITMAP (blocks, 0, bb_index, bi)
            delete_basic_block (BASIC_BLOCK (bb_index));
          BITMAP_FREE (blocks);
          release_ssa_name (op);
        }
      update_stmt (stmt);
      changed = true;
    }
  return changed;
}


/* Split basic blocks on calls in the middle of a basic block that are now
   known not to return, and remove the unreachable code.  */

static bool
split_bbs_on_noreturn_calls (void)
{
  bool changed = false;
  gimple stmt;
  basic_block bb;

  /* Detect cases where a mid-block call is now known not to return.  */
  if (cfun->gimple_df)
    while (vec_safe_length (MODIFIED_NORETURN_CALLS (cfun)))
      {
        stmt = MODIFIED_NORETURN_CALLS (cfun)->pop ();
        bb = gimple_bb (stmt);
        /* BB might be deleted at this point, so first verify that
           BB is still present in the CFG.  */
        if (bb == NULL
            || bb->index < NUM_FIXED_BLOCKS
            || bb->index >= last_basic_block
            || BASIC_BLOCK (bb->index) != bb
            || !gimple_call_noreturn_p (stmt))
          continue;

        changed |= fixup_noreturn_call (stmt);
      }

  return changed;
}

/* Tries to clean up the CFG in basic block BB.  Returns true if
   anything changes.  */

static bool
cleanup_tree_cfg_bb (basic_block bb)
{
  bool retval = cleanup_control_flow_bb (bb);

  if (tree_forwarder_block_p (bb, false)
      && remove_forwarder_block (bb))
    return true;

  /* Merging the blocks may create new opportunities for folding
     conditional branches (due to the elimination of single-valued PHI
     nodes).  */
  if (single_succ_p (bb)
      && can_merge_blocks_p (bb, single_succ (bb)))
    {
      merge_blocks (bb, single_succ (bb));
      return true;
    }

  return retval;
}

/* Iterate the CFG cleanups while anything changes.  */

static bool
cleanup_tree_cfg_1 (void)
{
  bool retval = false;
  basic_block bb;
  unsigned i, n;

  retval |= split_bbs_on_noreturn_calls ();

  /* Prepare the worklists of altered blocks.  */
  cfgcleanup_altered_bbs = BITMAP_ALLOC (NULL);

  /* During forwarder block cleanup, we may redirect edges out of
     SWITCH_EXPRs, which can get expensive.  So we want to enable
     recording of the edge-to-CASE_LABEL_EXPR mapping.  */
  start_recording_case_labels ();

  /* Start by iterating over all basic blocks.  We cannot use FOR_EACH_BB,
     since the basic blocks may get removed.  */
  n = last_basic_block;
  for (i = NUM_FIXED_BLOCKS; i < n; i++)
    {
      bb = BASIC_BLOCK (i);
      if (bb)
        retval |= cleanup_tree_cfg_bb (bb);
    }

  /* Now process the altered blocks, as long as any are available.  */
  while (!bitmap_empty_p (cfgcleanup_altered_bbs))
    {
      i = bitmap_first_set_bit (cfgcleanup_altered_bbs);
      bitmap_clear_bit (cfgcleanup_altered_bbs, i);
      if (i < NUM_FIXED_BLOCKS)
        continue;

      bb = BASIC_BLOCK (i);
      if (!bb)
        continue;

      retval |= cleanup_tree_cfg_bb (bb);

      /* Rerun split_bbs_on_noreturn_calls, in case we have altered any
         noreturn calls.  */
      retval |= split_bbs_on_noreturn_calls ();
    }

  end_recording_case_labels ();
  BITMAP_FREE (cfgcleanup_altered_bbs);
  return retval;
}


/* Remove unreachable blocks and do other miscellaneous cleanup work.
   Return true if the flowgraph was modified, false otherwise.  */

static bool
cleanup_tree_cfg_noloop (void)
{
  bool changed;

  timevar_push (TV_TREE_CLEANUP_CFG);

  /* Iterate until there are no more cleanups left to do.  If any
     iteration changed the flowgraph, set CHANGED to true.

     If dominance information is available, there cannot be any unreachable
     blocks.  */
  if (!dom_info_available_p (CDI_DOMINATORS))
    {
      changed = delete_unreachable_blocks ();
      calculate_dominance_info (CDI_DOMINATORS);
    }
  else
    {
#ifdef ENABLE_CHECKING
      verify_dominators (CDI_DOMINATORS);
#endif
      changed = false;
    }

  changed |= cleanup_tree_cfg_1 ();

  gcc_assert (dom_info_available_p (CDI_DOMINATORS));
  compact_blocks ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  timevar_pop (TV_TREE_CLEANUP_CFG);

  if (changed && current_loops)
    loops_state_set (LOOPS_NEED_FIXUP);

  return changed;
}

/* Repairs loop structures.  */

static void
repair_loop_structures (void)
{
  bitmap changed_bbs;
  unsigned n_new_loops;

  calculate_dominance_info (CDI_DOMINATORS);

  timevar_push (TV_REPAIR_LOOPS);
  changed_bbs = BITMAP_ALLOC (NULL);
  n_new_loops = fix_loop_structure (changed_bbs);

  /* This usually does nothing.  But sometimes parts of the CFG that
     originally were inside a loop get out of it due to edge removal
     (since they become unreachable by back edges from the latch).
     Also a formerly irreducible loop can become reducible; in this
     case force a full rewrite into loop-closed SSA form.  */
  if (loops_state_satisfies_p (LOOP_CLOSED_SSA))
    rewrite_into_loop_closed_ssa (n_new_loops ? NULL : changed_bbs,
                                  TODO_update_ssa);

  BITMAP_FREE (changed_bbs);

#ifdef ENABLE_CHECKING
  verify_loop_structure ();
#endif
  scev_reset ();

  timevar_pop (TV_REPAIR_LOOPS);
}

/* Clean up the CFG and repair loop structures.  */

bool
cleanup_tree_cfg (void)
{
  bool changed = cleanup_tree_cfg_noloop ();

  if (current_loops != NULL
      && loops_state_satisfies_p (LOOPS_NEED_FIXUP))
    repair_loop_structures ();

  return changed;
}

/* Merge the PHI nodes at BB into those at BB's sole successor.  */

static void
remove_forwarder_block_with_phi (basic_block bb)
{
  edge succ = single_succ_edge (bb);
  basic_block dest = succ->dest;
  gimple label;
  basic_block dombb, domdest, dom;

  /* We check for infinite loops already in tree_forwarder_block_p.
     However it may happen that the infinite loop is created
     afterwards due to removal of forwarders.  */
  if (dest == bb)
    return;

  /* If the destination block consists of a nonlocal label, do not
     merge it.  */
  label = first_stmt (dest);
  if (label
      && gimple_code (label) == GIMPLE_LABEL
      && DECL_NONLOCAL (gimple_label_label (label)))
    return;

  /* Redirect each incoming edge to BB to DEST.  */
  while (EDGE_COUNT (bb->preds) > 0)
    {
      edge e = EDGE_PRED (bb, 0), s;
      gimple_stmt_iterator gsi;

      s = find_edge (e->src, dest);
      if (s)
        {
          /* We already have an edge S from E->src to DEST.  If S and
             BB's sole successor edge SUCC have the same PHI arguments
             at DEST, redirect E to DEST.  */
          if (phi_alternatives_equal (dest, s, succ))
            {
              e = redirect_edge_and_branch (e, dest);
              redirect_edge_var_map_clear (e);
              continue;
            }

          /* PHI arguments are different.  Create a forwarder block by
             splitting E so that we can merge PHI arguments on E to
             DEST.  */
          e = single_succ_edge (split_edge (e));
        }

      s = redirect_edge_and_branch (e, dest);

      /* redirect_edge_and_branch must not create a new edge.  */
      gcc_assert (s == e);

      /* Add to the PHI nodes at DEST each PHI argument removed at the
         destination of E.  */
      for (gsi = gsi_start_phis (dest);
           !gsi_end_p (gsi);
           gsi_next (&gsi))
        {
          gimple phi = gsi_stmt (gsi);
          tree def = gimple_phi_arg_def (phi, succ->dest_idx);
          source_location locus = gimple_phi_arg_location_from_edge (phi, succ);

          if (TREE_CODE (def) == SSA_NAME)
            {
              edge_var_map_vector *head;
              edge_var_map *vm;
              size_t i;

              /* If DEF is one of the results of PHI nodes removed during
                 redirection, replace it with the PHI argument that used
                 to be on E.  */
              head = redirect_edge_var_map_vector (e);
              FOR_EACH_VEC_SAFE_ELT (head, i, vm)
                {
                  tree old_arg = redirect_edge_var_map_result (vm);
                  tree new_arg = redirect_edge_var_map_def (vm);

                  if (def == old_arg)
                    {
                      def = new_arg;
                      locus = redirect_edge_var_map_location (vm);
                      break;
                    }
                }
            }

          add_phi_arg (phi, def, s, locus);
        }

      redirect_edge_var_map_clear (e);
    }

  /* Update the dominators.  */
  dombb = get_immediate_dominator (CDI_DOMINATORS, bb);
  domdest = get_immediate_dominator (CDI_DOMINATORS, dest);
  if (domdest == bb)
    {
      /* Shortcut to avoid calling (relatively expensive)
         nearest_common_dominator unless necessary.  */
      dom = dombb;
    }
  else
    dom = nearest_common_dominator (CDI_DOMINATORS, domdest, dombb);

  set_immediate_dominator (CDI_DOMINATORS, dest, dom);

  /* Remove BB since all of BB's incoming edges have been redirected
     to DEST.  */
  delete_basic_block (bb);
}

/* This pass merges PHI nodes if one feeds into another.  For example,
   suppose we have the following:

  goto <bb 9> (<L9>);

<L8>:;
  tem_17 = foo ();

  # tem_6 = PHI <tem_17(8), tem_23(7)>;
<L9>:;

  # tem_3 = PHI <tem_6(9), tem_2(5)>;
<L10>:;

  Then we merge the first PHI node into the second one like so:

  goto <bb 9> (<L10>);

<L8>:;
  tem_17 = foo ();

  # tem_3 = PHI <tem_23(7), tem_2(5), tem_17(8)>;
<L10>:;
*/

static unsigned int
merge_phi_nodes (void)
{
  basic_block *worklist = XNEWVEC (basic_block, n_basic_blocks);
  basic_block *current = worklist;
  basic_block bb;

  calculate_dominance_info (CDI_DOMINATORS);

  /* Find all PHI nodes that we may be able to merge.  */
  FOR_EACH_BB (bb)
    {
      basic_block dest;

      /* Look for a forwarder block with PHI nodes.  */
      if (!tree_forwarder_block_p (bb, true))
        continue;

      dest = single_succ (bb);

      /* We have to feed into another basic block with PHI
         nodes.  */
      if (gimple_seq_empty_p (phi_nodes (dest))
          /* We don't want to deal with a basic block with
             abnormal edges.  */
          || bb_has_abnormal_pred (bb))
        continue;

      if (!dominated_by_p (CDI_DOMINATORS, dest, bb))
        {
          /* If BB does not dominate DEST, then the PHI nodes at
             DEST must be the only users of the results of the PHI
             nodes at BB.  */
          *current++ = bb;
        }
      else
        {
          gimple_stmt_iterator gsi;
          unsigned int dest_idx = single_succ_edge (bb)->dest_idx;

          /* BB dominates DEST.  There may be many users of the PHI
             nodes in BB.  However, there is still a trivial case we
             can handle.  If the result of every PHI in BB is used
             only by a PHI in DEST, then we can trivially merge the
             PHI nodes from BB into DEST.  */
          for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
               gsi_next (&gsi))
            {
              gimple phi = gsi_stmt (gsi);
              tree result = gimple_phi_result (phi);
              use_operand_p imm_use;
              gimple use_stmt;

              /* If the PHI's result is never used, then we can just
                 ignore it.  */
              if (has_zero_uses (result))
                continue;

              /* Get the single use of the result of this PHI node.  */
              if (!single_imm_use (result, &imm_use, &use_stmt)
                  || gimple_code (use_stmt) != GIMPLE_PHI
                  || gimple_bb (use_stmt) != dest
                  || gimple_phi_arg_def (use_stmt, dest_idx) != result)
                break;
            }

          /* If the loop above iterated through all the PHI nodes
             in BB, then we can merge the PHIs from BB into DEST.  */
          if (gsi_end_p (gsi))
            *current++ = bb;
        }
    }

  /* Now let's drain WORKLIST.  */
  while (current != worklist)
    {
      bb = *--current;
      remove_forwarder_block_with_phi (bb);
    }

  free (worklist);
  return 0;
}

static bool
gate_merge_phi (void)
{
  return true;
}

struct gimple_opt_pass pass_merge_phi =
{
 {
  GIMPLE_PASS,
  "mergephi",			/* name */
  OPTGROUP_NONE,		/* optinfo_flags */
  gate_merge_phi,		/* gate */
  merge_phi_nodes,		/* execute */
  NULL,				/* sub */
  NULL,				/* next */
  0,				/* static_pass_number */
  TV_TREE_MERGE_PHI,		/* tv_id */
  PROP_cfg | PROP_ssa,		/* properties_required */
  0,				/* properties_provided */
  0,				/* properties_destroyed */
  0,				/* todo_flags_start */
  TODO_ggc_collect		/* todo_flags_finish */
  | TODO_verify_ssa
 }
};