/* Dead code elimination pass for the GNU compiler.
   Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Ben Elliston <bje@redhat.com>
   and Andrew MacLeod <amacleod@redhat.com>
   Adapted to use control dependence by Steven Bosscher, SUSE Labs.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Dead code elimination.

   References:

     Building an Optimizing Compiler,
     Robert Morgan, Butterworth-Heinemann, 1998, Section 8.9.

     Advanced Compiler Design and Implementation,
     Steven Muchnick, Morgan Kaufmann, 1997, Section 18.10.
   Dead-code elimination is the removal of statements which have no
   impact on the program's output.  A statement is "dead" if removing
   it cannot affect the program's output, and "necessary" if it may
   affect that output.

   The algorithm consists of three phases:
   1. Marking as necessary all statements known to be necessary,
      e.g. most function calls, writing a value to memory, etc;
   2. Propagating necessary statements, e.g., the statements
      giving values to operands in necessary statements; and
   3. Removing dead statements.  */
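
/* As a minimal illustration of the three phases (an example, not taken
   from the sources), consider:

     int f (int b)
     {
       int a = b + 1;	dead: a is never used
       return b;	necessary: marked in phase 1
     }

   Phase 1 marks the return statement, phase 2 marks the statements
   computing the returned value, and phase 3 deletes the unused
   assignment to "a".  */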

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "ggc.h"

/* These RTL headers are needed for basic-block.h.  */
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "obstack.h"
#include "basic-block.h"

#include "tree.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "timevar.h"
#include "flags.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"

static struct stmt_stats
{
  int total;
  int total_phis;
  int removed;
  int removed_phis;
} stats;

#define STMT_NECESSARY GF_PLF_1

static VEC(gimple,heap) *worklist;

/* Vector indicating an SSA name has already been processed and marked
   as necessary.  */
static sbitmap processed;

/* Vector indicating that the last statement of a basic block has already
   been marked as necessary.  */
static sbitmap last_stmt_necessary;

/* Vector indicating that BB contains statements that are live.  */
static sbitmap bb_contains_live_stmts;

/* Before we can determine whether a control branch is dead, we need to
   compute which blocks are control dependent on which edges.

   We expect each block to be control dependent on very few edges, so we
   use one bitmap per block to record the edges it depends on; an array
   indexed by block number holds the bitmaps.  The Ith bit in a block's
   bitmap is set if that block is control dependent on the Ith edge.  */
static bitmap *control_dependence_map;
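
/* For example (illustration only), in

     if (p)		block A, ends in the condition
       x = 1;		block B
     y = 2;		block C

   block B is control dependent on the edge A->B, since taking A's
   other outgoing edge avoids B, while block C executes no matter which
   edge is taken and so is control dependent on neither.  */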

/* Vector indicating that a basic block has already had all the edges
   processed that it is control dependent on.  */
static sbitmap visited_control_parents;

/* TRUE if this pass alters the CFG (by removing control statements).
   FALSE otherwise.

   If this pass alters the CFG, then it will arrange for the dominators
   to be recomputed.  */
static bool cfg_altered;

/* Execute code that follows the macro for each edge (given number
   EDGE_NUMBER within the CODE) for which the block with index N is
   control dependent.  */
#define EXECUTE_IF_CONTROL_DEPENDENT(BI, N, EDGE_NUMBER)	\
  EXECUTE_IF_SET_IN_BITMAP (control_dependence_map[(N)], 0,	\
			    (EDGE_NUMBER), (BI))
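
/* A typical use of the macro above (cf.
   mark_control_dependent_edges_necessary below):

     bitmap_iterator bi;
     unsigned edge_number;
     EXECUTE_IF_CONTROL_DEPENDENT (bi, bb->index, edge_number)
       {
	 basic_block cd_bb = INDEX_EDGE_PRED_BB (el, edge_number);
	 ...
       }
*/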


/* Indicate block BB is control dependent on an edge with index EDGE_INDEX.  */
static inline void
set_control_dependence_map_bit (basic_block bb, int edge_index)
{
  if (bb == ENTRY_BLOCK_PTR)
    return;
  gcc_assert (bb != EXIT_BLOCK_PTR);
  bitmap_set_bit (control_dependence_map[bb->index], edge_index);
}

/* Clear all control dependences for block BB.  */
static inline void
clear_control_dependence_bitmap (basic_block bb)
{
  bitmap_clear (control_dependence_map[bb->index]);
}


/* Find the immediate postdominator PDOM of the specified basic block BLOCK.
   This function is necessary because some blocks have negative numbers.  */

static inline basic_block
find_pdom (basic_block block)
{
  gcc_assert (block != ENTRY_BLOCK_PTR);

  if (block == EXIT_BLOCK_PTR)
    return EXIT_BLOCK_PTR;
  else
    {
      basic_block bb = get_immediate_dominator (CDI_POST_DOMINATORS, block);
      if (! bb)
	return EXIT_BLOCK_PTR;
      return bb;
    }
}


/* Determine all blocks' control dependences on the given edge with edge_list
   EL index EDGE_INDEX, ala Morgan, Section 3.6.  A block W is control
   dependent on edge (U,V) iff W postdominates V but does not strictly
   postdominate U; these are exactly the blocks on the postdominator
   chain from V up to, but not including, the immediate postdominator
   of U, which is how the loop below walks them.  */

static void
find_control_dependence (struct edge_list *el, int edge_index)
{
  basic_block current_block;
  basic_block ending_block;

  gcc_assert (INDEX_EDGE_PRED_BB (el, edge_index) != EXIT_BLOCK_PTR);

  if (INDEX_EDGE_PRED_BB (el, edge_index) == ENTRY_BLOCK_PTR)
    ending_block = single_succ (ENTRY_BLOCK_PTR);
  else
    ending_block = find_pdom (INDEX_EDGE_PRED_BB (el, edge_index));

  for (current_block = INDEX_EDGE_SUCC_BB (el, edge_index);
       current_block != ending_block && current_block != EXIT_BLOCK_PTR;
       current_block = find_pdom (current_block))
    {
      edge e = INDEX_EDGE (el, edge_index);

      /* For abnormal edges, we don't make current_block control
	 dependent because instructions that throw are always necessary
	 anyway.  */
      if (e->flags & EDGE_ABNORMAL)
	continue;

      set_control_dependence_map_bit (current_block, edge_index);
    }
}


/* Record all blocks' control dependences on all edges in the edge
   list EL, ala Morgan, Section 3.6.  */

static void
find_all_control_dependences (struct edge_list *el)
{
  int i;

  for (i = 0; i < NUM_EDGES (el); ++i)
    find_control_dependence (el, i);
}

/* If STMT is not already marked necessary, mark it, and add it to the
   worklist if ADD_TO_WORKLIST is true.  */
static inline void
mark_stmt_necessary (gimple stmt, bool add_to_worklist)
{
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marking useful stmt: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (add_to_worklist)
    VEC_safe_push (gimple, heap, worklist, stmt);
  if (bb_contains_live_stmts && !is_gimple_debug (stmt))
    SET_BIT (bb_contains_live_stmts, gimple_bb (stmt)->index);
}


/* Mark the statement defining operand OP as necessary.  */

static inline void
mark_operand_necessary (tree op)
{
  gimple stmt;
  int ver;

  gcc_assert (op);

  ver = SSA_NAME_VERSION (op);
  if (TEST_BIT (processed, ver))
    {
      stmt = SSA_NAME_DEF_STMT (op);
      gcc_assert (gimple_nop_p (stmt)
		  || gimple_plf (stmt, STMT_NECESSARY));
      return;
    }
  SET_BIT (processed, ver);

  stmt = SSA_NAME_DEF_STMT (op);
  gcc_assert (stmt);

  if (gimple_plf (stmt, STMT_NECESSARY) || gimple_nop_p (stmt))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "marking necessary through ");
      print_generic_expr (dump_file, op, 0);
      fprintf (dump_file, " stmt ");
      print_gimple_stmt (dump_file, stmt, 0, 0);
    }

  gimple_set_plf (stmt, STMT_NECESSARY, true);
  if (bb_contains_live_stmts)
    SET_BIT (bb_contains_live_stmts, gimple_bb (stmt)->index);
  VEC_safe_push (gimple, heap, worklist, stmt);
}
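
/* For instance (illustration only), once "c_3 = a_1 + b_2" has been
   marked necessary, propagate_necessity calls the routine above for
   a_1 and b_2; their defining statements are marked and queued, and
   the operands of those statements are processed in turn as the
   worklist is drained.  */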


/* Mark STMT as necessary if it obviously is.  Add it to the worklist if
   it can make other statements necessary.

   If AGGRESSIVE is false, control statements are conservatively marked as
   necessary.  */

static void
mark_stmt_if_obviously_necessary (gimple stmt, bool aggressive)
{
  tree lhs = NULL_TREE;
  /* With non-call exceptions, we have to assume that all statements could
     throw.  If a statement may throw, it is inherently necessary.  */
  if (flag_non_call_exceptions
      && stmt_could_throw_p (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  /* Statements that are implicitly live.  Most function calls, asm
     and return statements are required.  Labels and GIMPLE_BIND nodes
     are kept because they are control flow, and we have no way of
     knowing whether they can be removed.  DCE can eliminate all the
     other statements in a block, and CFG can then remove the block
     and labels.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_PREDICT:
    case GIMPLE_LABEL:
      mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_ASM:
    case GIMPLE_RESX:
    case GIMPLE_RETURN:
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_CALL:
      /* Most, but not all, function calls are required.  Function calls
	 that produce no result and have no side effects (i.e. calls to
	 const or pure functions) are unnecessary.  */
      if (gimple_has_side_effects (stmt))
	{
	  mark_stmt_necessary (stmt, true);
	  return;
	}
      if (!gimple_call_lhs (stmt))
        return;
      lhs = gimple_call_lhs (stmt);
      /* Fall through */

    case GIMPLE_ASSIGN:
      if (!lhs)
        lhs = gimple_assign_lhs (stmt);
      break;

    case GIMPLE_DEBUG:
      /* Debug temps without a value are not useful.  ??? If we could
	 easily locate the debug temp bind stmt for a use thereof,
	 we could refrain from marking all debug temps here, and
	 mark them only if they're used.  */
      if (gimple_debug_bind_has_value_p (stmt)
	  || TREE_CODE (gimple_debug_bind_get_var (stmt)) != DEBUG_EXPR_DECL)
	mark_stmt_necessary (stmt, false);
      return;

    case GIMPLE_GOTO:
      gcc_assert (!simple_goto_p (stmt));
      mark_stmt_necessary (stmt, true);
      return;

    case GIMPLE_COND:
      gcc_assert (EDGE_COUNT (gimple_bb (stmt)->succs) == 2);
      /* Fall through.  */

    case GIMPLE_SWITCH:
      if (! aggressive)
	mark_stmt_necessary (stmt, true);
      break;

    default:
      break;
    }

  /* If the statement has volatile operands, it needs to be preserved.
     Same for statements that can alter control flow in unpredictable
     ways.  */
  if (gimple_has_volatile_ops (stmt) || is_ctrl_altering_stmt (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  if (is_hidden_global_store (stmt))
    {
      mark_stmt_necessary (stmt, true);
      return;
    }

  return;
}


/* Make corresponding control dependent edges necessary.  We only
   have to do this once for each basic block, so we clear the bitmap
   after we're done.  */
static void
mark_control_dependent_edges_necessary (basic_block bb, struct edge_list *el)
{
  bitmap_iterator bi;
  unsigned edge_number;

  gcc_assert (bb != EXIT_BLOCK_PTR);

  if (bb == ENTRY_BLOCK_PTR)
    return;

  EXECUTE_IF_CONTROL_DEPENDENT (bi, bb->index, edge_number)
    {
      gimple stmt;
      basic_block cd_bb = INDEX_EDGE_PRED_BB (el, edge_number);

      if (TEST_BIT (last_stmt_necessary, cd_bb->index))
	continue;
      SET_BIT (last_stmt_necessary, cd_bb->index);
      SET_BIT (bb_contains_live_stmts, cd_bb->index);

      stmt = last_stmt (cd_bb);
      if (stmt && is_ctrl_stmt (stmt))
	mark_stmt_necessary (stmt, true);
    }
}


/* Find obviously necessary statements.  These are things like most function
   calls, and stores to file level variables.

   If EL is NULL, control statements are conservatively marked as
   necessary.  Otherwise it contains the list of edges used by control
   dependence analysis.  */

static void
find_obviously_necessary_stmts (struct edge_list *el)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  edge e;
  gimple phi, stmt;

  FOR_EACH_BB (bb)
    {
      /* PHI nodes are never inherently necessary.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  phi = gsi_stmt (gsi);
	  gimple_set_plf (phi, STMT_NECESSARY, false);
	}

      /* Check all statements in the block.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  stmt = gsi_stmt (gsi);
	  gimple_set_plf (stmt, STMT_NECESSARY, false);
	  mark_stmt_if_obviously_necessary (stmt, el != NULL);
	}
    }

  /* Pure and const functions are finite and thus have no infinite loops in
     them.  */
  if ((TREE_READONLY (current_function_decl)
       || DECL_PURE_P (current_function_decl))
      && !DECL_LOOPING_CONST_OR_PURE_P (current_function_decl))
    return;

  /* Prevent empty, possibly infinite loops from being removed.  */
  if (el)
    {
      loop_iterator li;
      struct loop *loop;
      scev_initialize ();
      if (mark_irreducible_loops ())
	FOR_EACH_BB (bb)
	  {
	    edge_iterator ei;
	    FOR_EACH_EDGE (e, ei, bb->succs)
	      if ((e->flags & EDGE_DFS_BACK)
		  && (e->flags & EDGE_IRREDUCIBLE_LOOP))
		{
	          if (dump_file)
	            fprintf (dump_file, "Marking back edge of irreducible loop %i->%i\n",
		    	     e->src->index, e->dest->index);
		  mark_control_dependent_edges_necessary (e->dest, el);
		}
	  }

      FOR_EACH_LOOP (li, loop, 0)
	if (!finite_loop_p (loop))
	  {
	    if (dump_file)
	      fprintf (dump_file, "can not prove finiteness of loop %i\n", loop->num);
	    mark_control_dependent_edges_necessary (loop->latch, el);
	  }
      scev_finalize ();
    }
}


/* Return true if REF is based on an aliased base, otherwise false.  */

static bool
ref_may_be_aliased (tree ref)
{
  while (handled_component_p (ref))
    ref = TREE_OPERAND (ref, 0);
  return !(DECL_P (ref)
	   && !may_be_aliased (ref));
}
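
/* E.g. for a reference like "a.b[i].c" the loop above strips the
   handled components down to the base decl "a"; we return false only
   when "a" is a decl known not to be aliased (roughly, an automatic
   variable whose address is never taken).  This is an illustration,
   not an exhaustive description of may_be_aliased.  */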

static bitmap visited = NULL;
static unsigned int longest_chain = 0;
static unsigned int total_chain = 0;
static unsigned int nr_walks = 0;
static bool chain_ovfl = false;

/* Worker for the walker that marks reaching definitions of REF,
   which is based on a non-aliased decl, necessary.  It returns
   true whenever the defining statement of the current VDEF is
   a kill for REF, as no dominating may-defs are necessary for REF
   anymore.  DATA points to the basic-block that contains the
   stmt that refers to REF.  */

static bool
mark_aliased_reaching_defs_necessary_1 (ao_ref *ref, tree vdef, void *data)
{
  gimple def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* All stmts we visit are necessary.  */
  mark_operand_necessary (vdef);

  /* If the stmt lhs kills ref, then we can stop walking.  */
  if (gimple_has_lhs (def_stmt)
      && TREE_CODE (gimple_get_lhs (def_stmt)) != SSA_NAME
      /* The assignment is not necessarily carried out if it can throw
         and we can catch it in the current function where we could inspect
	 the previous value.
         ???  We only need to care about the RHS throwing.  For aggregate
	 assignments or similar calls and non-call exceptions the LHS
	 might throw as well.  */
      && !stmt_can_throw_internal (def_stmt))
    {
      tree base, lhs = gimple_get_lhs (def_stmt);
      HOST_WIDE_INT size, offset, max_size;
      ao_ref_base (ref);
      base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
      /* We can get MEM[symbol: sZ, index: D.8862_1] here,
	 so base == ref->base does not always hold.  */
      if (base == ref->base)
	{
	  /* For a must-alias check we need to be able to constrain
	     the accesses properly.  */
	  if (size != -1 && size == max_size
	      && ref->max_size != -1)
	    {
	      if (offset <= ref->offset
		  && offset + size >= ref->offset + ref->max_size)
		return true;
	    }
	  /* Or they need to be exactly the same.  */
	  else if (ref->ref
		   /* Make sure there is no induction variable involved
		      in the references (gcc.c-torture/execute/pr42142.c).
		      The simplest way is to check if the kill dominates
		      the use.  */
		   && dominated_by_p (CDI_DOMINATORS, (basic_block) data,
				      gimple_bb (def_stmt))
		   && operand_equal_p (ref->ref, lhs, 0))
	    return true;
	}
    }

  /* Otherwise keep walking.  */
  return false;
}

static void
mark_aliased_reaching_defs_necessary (gimple stmt, tree ref)
{
  unsigned int chain;
  ao_ref refd;
  gcc_assert (!chain_ovfl);
  ao_ref_init (&refd, ref);
  chain = walk_aliased_vdefs (&refd, gimple_vuse (stmt),
			      mark_aliased_reaching_defs_necessary_1,
			      gimple_bb (stmt), NULL);
  if (chain > longest_chain)
    longest_chain = chain;
  total_chain += chain;
  nr_walks++;
}
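
/* To illustrate the walk above for a load "tmp_2 = x" (example only):

     x = ...;		full store: a kill, marked necessary, walk stops
     x.f = ...;		partial store: marked necessary, walk continues
     tmp_2 = x;		the load whose VUSE starts the walk

   The walk proceeds upwards from the load along the VDEF chain;
   may-defs above the first full kill are never visited and so can
   remain dead if nothing else requires them.  */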

/* Worker for the walker that marks reaching definitions of REF, which
   is not based on a non-aliased decl.  For simplicity we need to end
   up marking all may-defs necessary that are not based on a non-aliased
   decl.  The only job of this walker is to skip may-defs based on
   a non-aliased decl.  */

static bool
mark_all_reaching_defs_necessary_1 (ao_ref *ref ATTRIBUTE_UNUSED,
				    tree vdef, void *data ATTRIBUTE_UNUSED)
{
  gimple def_stmt = SSA_NAME_DEF_STMT (vdef);

  /* We have to skip already visited (and thus necessary) statements
     to make the chaining work after we dropped back to simple mode.  */
  if (chain_ovfl
      && TEST_BIT (processed, SSA_NAME_VERSION (vdef)))
    {
      gcc_assert (gimple_nop_p (def_stmt)
		  || gimple_plf (def_stmt, STMT_NECESSARY));
      return false;
    }

  /* We want to skip stores to non-aliased variables.  */
  if (!chain_ovfl
      && gimple_assign_single_p (def_stmt))
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      if (!ref_may_be_aliased (lhs))
	return false;
    }

  mark_operand_necessary (vdef);

  return false;
}

static void
mark_all_reaching_defs_necessary (gimple stmt)
{
  walk_aliased_vdefs (NULL, gimple_vuse (stmt),
		      mark_all_reaching_defs_necessary_1, NULL, &visited);
}
/* Return true for PHI nodes with one argument or with identical
   arguments; such PHI nodes can be removed.  */
static bool
degenerate_phi_p (gimple phi)
{
  unsigned int i;
  tree op = gimple_phi_arg_def (phi, 0);
  for (i = 1; i < gimple_phi_num_args (phi); i++)
    if (gimple_phi_arg_def (phi, i) != op)
      return false;
  return true;
}
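
/* E.g. PHI <a_1(2), a_1(3), a_1(4)> is degenerate, while
   PHI <a_1(2), b_2(3)> is not (illustration only).  */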

/* Propagate necessity using the operands of necessary statements.
   Process the uses on each statement in the worklist, and add all
   feeding statements which contribute to the calculation of this
   value to the worklist.

   In conservative mode, EL is NULL.  */

static void
propagate_necessity (struct edge_list *el)
{
  gimple stmt;
  bool aggressive = (el ? true : false);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nProcessing worklist:\n");

  while (VEC_length (gimple, worklist) > 0)
    {
      /* Take STMT from worklist.  */
      stmt = VEC_pop (gimple, worklist);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "processing: ");
	  print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	  fprintf (dump_file, "\n");
	}

      if (aggressive)
	{
	  /* Mark the last statements of the basic blocks that the block
	     containing STMT is control dependent on, but only if we haven't
	     already done so.  */
	  basic_block bb = gimple_bb (stmt);
	  if (bb != ENTRY_BLOCK_PTR
	      && ! TEST_BIT (visited_control_parents, bb->index))
	    {
	      SET_BIT (visited_control_parents, bb->index);
	      mark_control_dependent_edges_necessary (bb, el);
	    }
	}

      if (gimple_code (stmt) == GIMPLE_PHI
	  /* We do not process virtual PHI nodes nor do we track their
	     necessity.  */
	  && is_gimple_reg (gimple_phi_result (stmt)))
	{
	  /* PHI nodes are somewhat special in that each PHI alternative has
	     data and control dependencies.  All the statements feeding the
	     PHI node's arguments are always necessary.  In aggressive mode,
	     we also consider the control dependent edges leading to the
	     predecessor block associated with each PHI alternative as
	     necessary.  */
	  size_t k;

	  for (k = 0; k < gimple_phi_num_args (stmt); k++)
            {
	      tree arg = PHI_ARG_DEF (stmt, k);
	      if (TREE_CODE (arg) == SSA_NAME)
		mark_operand_necessary (arg);
	    }

	  if (aggressive && !degenerate_phi_p (stmt))
	    {
	      for (k = 0; k < gimple_phi_num_args (stmt); k++)
		{
		  basic_block arg_bb = gimple_phi_arg_edge (stmt, k)->src;
		  if (arg_bb != ENTRY_BLOCK_PTR
		      && ! TEST_BIT (visited_control_parents, arg_bb->index))
		    {
		      SET_BIT (visited_control_parents, arg_bb->index);
		      mark_control_dependent_edges_necessary (arg_bb, el);
		    }
		}
	    }
	}
      else
	{
	  /* Propagate through the operands.  Examine all the USE, VUSE and
	     VDEF operands in this statement.  Mark all the statements
	     which feed this statement's uses as necessary.  */
	  ssa_op_iter iter;
	  tree use;

	  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
	    mark_operand_necessary (use);

	  use = gimple_vuse (stmt);
	  if (!use)
	    continue;

	  /* If we dropped to simple mode make all immediately
	     reachable definitions necessary.  */
	  if (chain_ovfl)
	    {
	      mark_all_reaching_defs_necessary (stmt);
	      continue;
	    }

	  /* For statements that may load from memory (have a VUSE) we
	     have to mark all reaching (may-)definitions as necessary.
	     We partition this task into two cases:
	      1) explicit loads based on decls that are not aliased
	      2) implicit loads (like calls) and explicit loads not
	         based on decls that are not aliased (like indirect
		 references or loads from globals)
	     For 1) we mark all reaching may-defs as necessary, stopping
	     at dominating kills.  For 2) we want to mark all dominating
	     references necessary, except for those based on non-aliased
	     decls, which are handled in 1).  By keeping a global visited
	     bitmap for the references we walk for 2) we avoid quadratic
	     behavior for those.  */
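
	  /* E.g. a load "tmp_1 = x" from a non-addressable local x is
	     case 1) and uses the kill-stopping walk, while "tmp_1 = *p_2"
	     or a load from a global falls into case 2) and must consider
	     every may-def (illustration only).  */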

	  if (is_gimple_call (stmt))
	    {
	      tree callee = gimple_call_fndecl (stmt);
	      unsigned i;

	      /* Calls to functions that are merely acting as barriers
		 or that only store to memory do not make any previous
		 stores necessary.  */
	      if (callee != NULL_TREE
		  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
		  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_MEMSET
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_MALLOC
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_FREE))
		continue;

	      /* Calls implicitly load from memory; in addition, their
	         arguments may explicitly perform memory loads.  */
	      mark_all_reaching_defs_necessary (stmt);
	      for (i = 0; i < gimple_call_num_args (stmt); ++i)
		{
		  tree arg = gimple_call_arg (stmt, i);
		  if (TREE_CODE (arg) == SSA_NAME
		      || is_gimple_min_invariant (arg))
		    continue;
		  if (!ref_may_be_aliased (arg))
		    mark_aliased_reaching_defs_necessary (stmt, arg);
		}
	    }
	  else if (gimple_assign_single_p (stmt))
	    {
	      tree rhs;
	      bool rhs_aliased = false;
	      /* If this is a load, mark things necessary.  */
	      rhs = gimple_assign_rhs1 (stmt);
	      if (TREE_CODE (rhs) != SSA_NAME
		  && !is_gimple_min_invariant (rhs))
		{
		  if (!ref_may_be_aliased (rhs))
		    mark_aliased_reaching_defs_necessary (stmt, rhs);
		  else
		    rhs_aliased = true;
		}
	      if (rhs_aliased)
		mark_all_reaching_defs_necessary (stmt);
	    }
	  else if (gimple_code (stmt) == GIMPLE_RETURN)
	    {
	      tree rhs = gimple_return_retval (stmt);
	      /* A return statement may perform a load.  */
	      if (TREE_CODE (rhs) != SSA_NAME
		  && !is_gimple_min_invariant (rhs))
		{
		  if (!ref_may_be_aliased (rhs))
		    mark_aliased_reaching_defs_necessary (stmt, rhs);
		  else
		    mark_all_reaching_defs_necessary (stmt);
		}
	    }
	  else if (gimple_code (stmt) == GIMPLE_ASM)
	    {
	      unsigned i;
	      mark_all_reaching_defs_necessary (stmt);
	      /* Inputs may perform loads.  */
	      for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
		{
		  tree op = TREE_VALUE (gimple_asm_input_op (stmt, i));
		  if (TREE_CODE (op) != SSA_NAME
		      && !is_gimple_min_invariant (op)
		      && !ref_may_be_aliased (op))
		    mark_aliased_reaching_defs_necessary (stmt, op);
		}
	    }
	  else
	    gcc_unreachable ();

	  /* If we overused our alias oracle budget, drop to simple
	     mode.  The cost metric allows quadratic behavior (number of
	     uses times number of may-defs queries) up to a constant
	     maximal number of queries, and after that falls back to
	     super-linear complexity.  */
	  if (/* Constant but quadratic for small functions.  */
	      total_chain > 128 * 128
	      /* Linear in the number of may-defs.  */
	      && total_chain > 32 * longest_chain
	      /* Linear in the number of uses.  */
	      && total_chain > nr_walks * 32)
	    {
	      chain_ovfl = true;
	      if (visited)
		bitmap_clear (visited);
	    }
	}
    }
}

/* Replace all uses of the result of PHI by the underlying variable and
   mark it for renaming.  */

void
mark_virtual_phi_result_for_renaming (gimple phi)
{
  bool used = false;
  imm_use_iterator iter;
  use_operand_p use_p;
  gimple stmt;
  tree result_ssa, result_var;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Marking result for renaming : ");
      print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  result_ssa = gimple_phi_result (phi);
  result_var = SSA_NAME_VAR (result_ssa);
  FOR_EACH_IMM_USE_STMT (stmt, iter, result_ssa)
    {
      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
        SET_USE (use_p, result_var);
      update_stmt (stmt);
      used = true;
    }
  if (used)
    mark_sym_for_renaming (result_var);
}

/* Remove dead PHI nodes from block BB.  */

static bool
remove_dead_phis (basic_block bb)
{
  bool something_changed = false;
  gimple_seq phis;
  gimple phi;
  gimple_stmt_iterator gsi;
  phis = phi_nodes (bb);

  for (gsi = gsi_start (phis); !gsi_end_p (gsi);)
    {
      stats.total_phis++;
      phi = gsi_stmt (gsi);

      /* We do not track necessity of virtual PHI nodes.  Instead do
         very simple dead PHI removal here.  */
      if (!is_gimple_reg (gimple_phi_result (phi)))
	{
	  /* Virtual PHI nodes with one or identical arguments
	     can be removed.  */
	  if (degenerate_phi_p (phi))
	    {
	      tree vdef = gimple_phi_result (phi);
	      tree vuse = gimple_phi_arg_def (phi, 0);

	      use_operand_p use_p;
	      imm_use_iterator iter;
	      gimple use_stmt;
	      FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
		FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
		  SET_USE (use_p, vuse);
	      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef)
	          && TREE_CODE (vuse) == SSA_NAME)
		SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
	    }
	  else
	    gimple_set_plf (phi, STMT_NECESSARY, true);
	}

      if (!gimple_plf (phi, STMT_NECESSARY))
	{
	  something_changed = true;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Deleting : ");
	      print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  remove_phi_node (&gsi, true);
	  stats.removed_phis++;
	  continue;
	}

      gsi_next (&gsi);
    }
  return something_changed;
}

/* Forward edge E to respective POST_DOM_BB and update PHIs.  */

static edge
forward_edge_to_pdom (edge e, basic_block post_dom_bb)
{
  gimple_stmt_iterator gsi;
  edge e2 = NULL;
  edge_iterator ei;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Redirecting edge %i->%i to %i\n", e->src->index,
	     e->dest->index, post_dom_bb->index);

  e2 = redirect_edge_and_branch (e, post_dom_bb);
  cfg_altered = true;

  /* If the edge was already around, no updating is necessary.  */
  if (e2 != e)
    return e2;

  if (!gimple_seq_empty_p (phi_nodes (post_dom_bb)))
    {
      /* We are sure that for every live PHI we are seeing a control
	 dependent BB.  This means that we can pick any edge to duplicate
	 PHI args from.  */
      FOR_EACH_EDGE (e2, ei, post_dom_bb->preds)
	if (e2 != e)
	  break;
      for (gsi = gsi_start_phis (post_dom_bb); !gsi_end_p (gsi);)
	{
	  gimple phi = gsi_stmt (gsi);
	  tree op;
	  source_location locus;

	  /* PHIs for virtuals have no control dependency relation on them.
	     We are lost here and must force renaming of the symbol.  */
	  if (!is_gimple_reg (gimple_phi_result (phi)))
	    {
	      mark_virtual_phi_result_for_renaming (phi);
	      remove_phi_node (&gsi, true);
	      continue;
	    }

	  /* Dead PHIs do not imply control dependency.  */
          if (!gimple_plf (phi, STMT_NECESSARY))
	    {
	      gsi_next (&gsi);
	      continue;
	    }

	  op = gimple_phi_arg_def (phi, e2->dest_idx);
	  locus = gimple_phi_arg_location (phi, e2->dest_idx);
	  add_phi_arg (phi, op, e, locus);
	  /* If not dead, the resulting PHI can only be degenerate.  */
	  gcc_assert (degenerate_phi_p (phi));
	  gsi_next (&gsi);
	}
    }
  return e;
}

/* Remove dead statement pointed to by iterator I.  Receives the basic block BB
   containing I so that we don't have to look it up.  */

static void
remove_dead_stmt (gimple_stmt_iterator *i, basic_block bb)
{
  gimple stmt = gsi_stmt (*i);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Deleting : ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  stats.removed++;

  /* If we have determined that a conditional branch statement contributes
     nothing to the program, then we not only remove it, but we also change
     the flow graph so that the current block will simply fall-thru to its
     immediate post-dominator.  The blocks we are circumventing will be
     removed by cleanup_tree_cfg if this change in the flow graph makes them
     unreachable.  */
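
  /* For instance (illustration only), when the GIMPLE_COND for
     "if (x_1 != 0)" is dead, BB is left with a single fallthru edge to
     its immediate post-dominator; the then/else blocks in between
     become unreachable and are cleaned up later.  */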
  if (is_ctrl_stmt (stmt))
    {
      basic_block post_dom_bb;
      edge e, e2;
      edge_iterator ei;

      post_dom_bb = get_immediate_dominator (CDI_POST_DOMINATORS, bb);

      e = find_edge (bb, post_dom_bb);

      /* If the edge is already there, try to use it.  This avoids the need
         to update PHI nodes.  Also watch for cases where the post dominator
	 does not exist or is the exit block.  These can happen for infinite
	 loops as we create fake edges in the dominator tree.  */
      if (e)
        ;
      else if (! post_dom_bb || post_dom_bb == EXIT_BLOCK_PTR)
	e = EDGE_SUCC (bb, 0);
      else
        e = forward_edge_to_pdom (EDGE_SUCC (bb, 0), post_dom_bb);
      gcc_assert (e);
      e->probability = REG_BR_PROB_BASE;
      e->count = bb->count;

      /* The edge is no longer associated with a conditional, so it does
	 not have TRUE/FALSE flags.  */
      e->flags &= ~(EDGE_TRUE_VALUE | EDGE_FALSE_VALUE);

      /* The lone outgoing edge from BB will be a fallthru edge.  */
      e->flags |= EDGE_FALLTHRU;

      /* Remove the remaining outgoing edges.  */
      for (ei = ei_start (bb->succs); (e2 = ei_safe_edge (ei)); )
	if (e != e2)
	  {
	    cfg_altered = true;
            remove_edge (e2);
	  }
	else
	  ei_next (&ei);
    }

  unlink_stmt_vdef (stmt);
  gsi_remove (i, true);
  release_defs (stmt);
}

/* Eliminate unnecessary statements.  Any instruction not marked as necessary
   contributes nothing to the program, and can be deleted.  */

static bool
eliminate_unnecessary_stmts (void)
{
  bool something_changed = false;
  basic_block bb;
  gimple_stmt_iterator gsi, psi;
  gimple stmt;
  tree call;
  VEC (basic_block, heap) *h;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\nEliminating unnecessary statements:\n");

  clear_special_calls ();

  /* Walking basic blocks and statements in reverse order avoids
     releasing SSA names before any other DEFs that refer to them are
     released.  This helps avoid loss of debug information, as we get
     a chance to propagate all RHSs of removed SSAs into debug uses,
     rather than only the latest ones.  E.g., consider:

     x_3 = y_1 + z_2;
     a_5 = x_3 - b_4;
     # DEBUG a => a_5

     If we were to release x_3 before a_5, when we reached a_5 and
     tried to substitute it into the debug stmt, we'd see x_3 there,
     but x_3's DEF, type, etc would have already been disconnected.
     By going backwards, the debug stmt first changes to:

     # DEBUG a => x_3 - b_4

     and then to:

     # DEBUG a => y_1 + z_2 - b_4

     as desired.  */
  gcc_assert (dom_info_available_p (CDI_DOMINATORS));
  h = get_all_dominated_blocks (CDI_DOMINATORS, single_succ (ENTRY_BLOCK_PTR));

  while (VEC_length (basic_block, h))
    {
      bb = VEC_pop (basic_block, h);

      /* Remove dead statements.  */
      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi); gsi = psi)
	{
	  stmt = gsi_stmt (gsi);

	  psi = gsi;
	  gsi_prev (&psi);

	  stats.total++;

	  /* If the statement is not necessary, remove it.  */
	  if (!gimple_plf (stmt, STMT_NECESSARY))
	    {
	      if (!is_gimple_debug (stmt))
		something_changed = true;
	      remove_dead_stmt (&gsi, bb);
	    }
	  else if (is_gimple_call (stmt))
	    {
	      call = gimple_call_fndecl (stmt);
	      if (call)
		{
		  tree name;

		  /* When LHS of var = call (); is dead, simplify it into
		     call (); saving one operand.  */
		  name = gimple_call_lhs (stmt);
		  if (name && TREE_CODE (name) == SSA_NAME
		           && !TEST_BIT (processed, SSA_NAME_VERSION (name)))
		    {
		      something_changed = true;
		      if (dump_file && (dump_flags & TDF_DETAILS))
			{
			  fprintf (dump_file, "Deleting LHS of call: ");
			  print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
			  fprintf (dump_file, "\n");
			}

		      gimple_call_set_lhs (stmt, NULL_TREE);
		      maybe_clean_or_replace_eh_stmt (stmt, stmt);
		      update_stmt (stmt);
		      release_ssa_name (name);
		    }
		  notice_special_calls (stmt);
		}
	    }
	}
    }

  VEC_free (basic_block, heap, h);

  /* Since we don't track liveness of virtual PHI nodes, it is possible that we
     rendered some PHI nodes unreachable while they are still in use.
     Mark them for renaming.  */
  if (cfg_altered)
    {
      basic_block prev_bb;

      find_unreachable_blocks ();

      /* Delete all unreachable basic blocks in reverse dominator order.  */
      for (bb = EXIT_BLOCK_PTR->prev_bb; bb != ENTRY_BLOCK_PTR; bb = prev_bb)
	{
	  prev_bb = bb->prev_bb;

	  if (!TEST_BIT (bb_contains_live_stmts, bb->index)
	      || !(bb->flags & BB_REACHABLE))
	    {
	      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
		if (!is_gimple_reg (gimple_phi_result (gsi_stmt (gsi))))
		  {
		    bool found = false;
		    imm_use_iterator iter;

		    FOR_EACH_IMM_USE_STMT (stmt, iter, gimple_phi_result (gsi_stmt (gsi)))
		      {
			if (!(gimple_bb (stmt)->flags & BB_REACHABLE))
			  continue;
			if (gimple_code (stmt) == GIMPLE_PHI
			    || gimple_plf (stmt, STMT_NECESSARY))
			  {
			    found = true;
			    BREAK_FROM_IMM_USE_STMT (iter);
			  }
		      }
		    if (found)
		      mark_virtual_phi_result_for_renaming (gsi_stmt (gsi));
		  }

	      if (!(bb->flags & BB_REACHABLE))
		{
		  /* Speed up the removal of blocks that don't
		     dominate others.  Walking backwards, this should
		     be the common case.  ??? Do we need to recompute
		     dominators because of cfg_altered?  */
		  if (!MAY_HAVE_DEBUG_STMTS
		      || !first_dom_son (CDI_DOMINATORS, bb))
		    delete_basic_block (bb);
		  else
		    {
		      h = get_all_dominated_blocks (CDI_DOMINATORS, bb);

		      while (VEC_length (basic_block, h))
			{
			  bb = VEC_pop (basic_block, h);
			  prev_bb = bb->prev_bb;
			  /* Rearrangements to the CFG may have failed
			     to update the dominators tree, so that
			     formerly-dominated blocks are now
			     otherwise reachable.  */
			  if (!!(bb->flags & BB_REACHABLE))
			    continue;
			  delete_basic_block (bb);
			}

		      VEC_free (basic_block, heap, h);
		    }
		}
	    }
	}
    }
  FOR_EACH_BB (bb)
    {
      /* Remove dead PHI nodes.  */
      something_changed |= remove_dead_phis (bb);
    }

  return something_changed;
}


/* Print out removed statement statistics.  */

static void
print_stats (void)
{
  float percg;

  percg = ((float) stats.removed / (float) stats.total) * 100;
  fprintf (dump_file, "Removed %d of %d statements (%d%%)\n",
	   stats.removed, stats.total, (int) percg);

  if (stats.total_phis == 0)
    percg = 0;
  else
    percg = ((float) stats.removed_phis / (float) stats.total_phis) * 100;

  fprintf (dump_file, "Removed %d of %d PHI nodes (%d%%)\n",
	   stats.removed_phis, stats.total_phis, (int) percg);
}

/* Initialization for this pass.  Set up the used data structures.  */

static void
tree_dce_init (bool aggressive)
{
  memset ((void *) &stats, 0, sizeof (stats));

  if (aggressive)
    {
      int i;

      control_dependence_map = XNEWVEC (bitmap, last_basic_block);
      for (i = 0; i < last_basic_block; ++i)
	control_dependence_map[i] = BITMAP_ALLOC (NULL);

      last_stmt_necessary = sbitmap_alloc (last_basic_block);
      sbitmap_zero (last_stmt_necessary);
      bb_contains_live_stmts = sbitmap_alloc (last_basic_block);
      sbitmap_zero (bb_contains_live_stmts);
    }

  processed = sbitmap_alloc (num_ssa_names + 1);
  sbitmap_zero (processed);

  worklist = VEC_alloc (gimple, heap, 64);
  cfg_altered = false;
}

/* Cleanup after this pass.  */

static void
tree_dce_done (bool aggressive)
{
  if (aggressive)
    {
      int i;

      for (i = 0; i < last_basic_block; ++i)
	BITMAP_FREE (control_dependence_map[i]);
      free (control_dependence_map);

      sbitmap_free (visited_control_parents);
      sbitmap_free (last_stmt_necessary);
      sbitmap_free (bb_contains_live_stmts);
      bb_contains_live_stmts = NULL;
    }

  sbitmap_free (processed);

  VEC_free (gimple, heap, worklist);
}

/* Main routine to eliminate dead code.

   AGGRESSIVE controls the aggressiveness of the algorithm.
   In conservative mode, we ignore control dependence and simply declare
   all but the most trivially dead branches necessary.  This mode is fast.
   In aggressive mode, control dependences are taken into account, which
   results in more dead code elimination, but at the cost of some time.

   FIXME: Aggressive mode before PRE doesn't work currently because
	  the dominance info is not invalidated after DCE1.  This is
	  not an issue right now because we only run aggressive DCE
	  as the last tree SSA pass, but keep this in mind when you
	  start experimenting with pass ordering.  */

static unsigned int
perform_tree_ssa_dce (bool aggressive)
{
  struct edge_list *el = NULL;
  bool something_changed = 0;

  calculate_dominance_info (CDI_DOMINATORS);

  /* Preheaders are needed for SCEV to work.
     Simple latches and recorded exits improve the chances that a loop
     will be proved finite in testcases such as loop-15.c and loop-24.c.  */
  if (aggressive)
    loop_optimizer_init (LOOPS_NORMAL
			 | LOOPS_HAVE_RECORDED_EXITS);

  tree_dce_init (aggressive);

  if (aggressive)
    {
      /* Compute control dependence.  */
      timevar_push (TV_CONTROL_DEPENDENCES);
      calculate_dominance_info (CDI_POST_DOMINATORS);
      el = create_edge_list ();
      find_all_control_dependences (el);
      timevar_pop (TV_CONTROL_DEPENDENCES);

      visited_control_parents = sbitmap_alloc (last_basic_block);
      sbitmap_zero (visited_control_parents);

      mark_dfs_back_edges ();
    }

  find_obviously_necessary_stmts (el);

  if (aggressive)
    loop_optimizer_finalize ();

  longest_chain = 0;
  total_chain = 0;
  nr_walks = 0;
  chain_ovfl = false;
  visited = BITMAP_ALLOC (NULL);
  propagate_necessity (el);
  BITMAP_FREE (visited);

  something_changed |= eliminate_unnecessary_stmts ();
  something_changed |= cfg_altered;

  /* We do not update postdominators, so free them unconditionally.  */
  free_dominance_info (CDI_POST_DOMINATORS);

  /* If we removed paths in the CFG, then we need to update
     dominators as well.  I haven't investigated the possibility
     of incrementally updating dominators.  */
  if (cfg_altered)
    free_dominance_info (CDI_DOMINATORS);

  statistics_counter_event (cfun, "Statements deleted", stats.removed);
  statistics_counter_event (cfun, "PHI nodes deleted", stats.removed_phis);

  /* Debugging dumps.  */
  if (dump_file && (dump_flags & (TDF_STATS|TDF_DETAILS)))
    print_stats ();

  tree_dce_done (aggressive);

  free_edge_list (el);

  if (something_changed)
    return (TODO_update_ssa | TODO_cleanup_cfg | TODO_ggc_collect
	    | TODO_remove_unused_locals);
  else
    return 0;
}

/* Pass entry points.  */
static unsigned int
tree_ssa_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/false);
}

static unsigned int
tree_ssa_dce_loop (void)
{
  unsigned int todo;
  todo = perform_tree_ssa_dce (/*aggressive=*/false);
  if (todo)
    {
      free_numbers_of_iterations_estimates ();
      scev_reset ();
    }
  return todo;
}

static unsigned int
tree_ssa_cd_dce (void)
{
  return perform_tree_ssa_dce (/*aggressive=*/optimize >= 2);
}

static bool
gate_dce (void)
{
  return flag_tree_dce != 0;
}

struct gimple_opt_pass pass_dce =
{
 {
  GIMPLE_PASS,
  "dce",				/* name */
  gate_dce,				/* gate */
  tree_ssa_dce,				/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_DCE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa	/* todo_flags_finish */
 }
};

struct gimple_opt_pass pass_dce_loop =
{
 {
  GIMPLE_PASS,
  "dceloop",				/* name */
  gate_dce,				/* gate */
  tree_ssa_dce_loop,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_DCE,				/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa	/* todo_flags_finish */
 }
};

struct gimple_opt_pass pass_cd_dce =
{
 {
  GIMPLE_PASS,
  "cddce",				/* name */
  gate_dce,				/* gate */
  tree_ssa_cd_dce,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_CD_DCE,			/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_verify_ssa
  | TODO_verify_flow			/* todo_flags_finish */
 }
};